Example #1
    def test_Dataheap_Recorded_001_06(self):
        """Test method 'Recorded.importMetadata()' and 'Recorded.update()'
           in class 'Recorded' from 'dataheap'.
           Test Case:
           - get a recording
           - save the 'stars' value of the recording for later use
           - save the dictdata of the recording for later use
           - export the metadata to xml and save for later use
           - check xml metadata structure for the 'stars' i.e. 'userrating' value
           - change the 'stars' value and save it for later use
           - update (save to database) the recording with the new 'stars' value
           - get the recording again to a new instance
           - check the updated 'stars' value
           - import the saved metadata back to the recording
           - check the reverted 'stars' value
           - check that the dictionary from the new Recorded instance is compatible with the original one
           - update Recorded.stars to the original value
           - check for correct value of stars in final instance of Recorded
        """
        chanid        = self.testenv['RECCHANID']
        starttimeutc  = self.testenv['RECSTARTTIMEUTC']
        starttimemyth = self.testenv['RECSTARTTIMEMYTH']
        title         = self.testenv['RECTITLE']
        basename      = self.testenv['RECBASENAME']
        recordedid    = self.testenv['RECRECORDID']
        inetref       = self.testenv['RECINETREF']
        stars         = self.testenv['RECSTARS']

        # Update database in case of any errors from previous test runs
        reczero = Recorded((chanid, starttimemyth), db = self.mydb)
        reczero.stars = stars
        reczero.update()

        rec = Recorded((chanid, starttimemyth), db = self.mydb)
        # save the 'stars' value i.e. 'userrating'
        recstars = rec.stars
        self.assertEqual("%.1f" %recstars, stars)
        # Recorded._origdata holds the dictionary pulled from database
        recdict = {}
        for key, value in rec._origdata.items():
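            # datetime values are converted with mythformat() so they can be compared as strings later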
            if isinstance(value, datetime):
                recdict[key] = value.mythformat()
            else:
                recdict[key] = value
        # export the metadata to xml and save for later use
        recmd = rec.exportMetadata()
        recmdxml = recmd.toXML()
        # check xml metadata structure for the 'stars' i.e. 'userrating' value
        # see https://www.mythtv.org/wiki/MythTV_Universal_Metadata_Format
        tree = recmdxml.getroottree()
        ### pprint(tree)    # lxml stuff
        recmdxml_stars = next(tree.iter('userrating')).text
        self.assertEqual("%.1f" %float(recmdxml_stars), stars)
        # change the 'stars' value and save it for later use
        rec.stars += 0.1
        recstars_updated = rec.stars
        # update (save to database) the recording with the new 'stars' value
        rec.update()
        # get the recording again to a new instance
        recnew = Recorded((chanid, starttimemyth), db = self.mydb)
        # check the updated 'stars' value
        self.assertEqual(recnew.stars, recstars_updated)
        # import the saved metadata back to the recording
        # Note: Recorded.importMetadata() makes an implicit Recorded.update()
        recnew.importMetadata(recmd, overwrite=True)
        # check the reverted 'stars' value
        self.assertEqual("%.1f" %recnew.stars, stars)
        # check that the dictionary from the new Recorded instance is compatible with the original one:
        for key, value in recdict.items():
            if isinstance(recnew._origdata[key], datetime):
                # don't act on 'lastmodified' entry, because we changed the rec in between:
                if key != 'lastmodified':
                    self.assertEqual(recdict[key], recnew._origdata[key].mythformat())

        self.assertEqual(len(recdict), len(recnew._origdata))
        # update Recorded.stars to the original value
        recnew.stars = recstars
        recnew.update()
        # check for correct value of stars in final instance of Recorded
        reclast = Recorded((chanid, starttimemyth), db = self.mydb)
        self.assertEqual("%.1f" %reclast.stars, stars)
Example #2
        rec.basename = os.path.basename(outfile)
        os.remove(infile)
        rec.filesize = os.path.getsize(outfile)
        rec.transcoded = 1
        rec.seek.clean()

        if flush_commskip:
            for index,mark in reversed(list(enumerate(rec.markup))):
                if mark.type in (rec.markup.MARK_COMM_START, 
                                 rec.markup.MARK_COMM_END):
                    del rec.markup[index]
            rec.bookmark = 0
            rec.cutlist = 0
            rec.markup.commit()

        rec.update()

        if build_seektable:
            try:
                task = System(path='mythcommflag', db=self.db())
                task.command('--chanid %s' % chanid,
                             '--starttime %s' % starttime)
            except MythError as e:
                self.log('Mythcommflag --chanid %s --starttime %s failed: %s' %
                         (chanid, starttime, str(e)), LOGLEVEL.ERR)

        if jobid:
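            # status 272 is the numeric value of the FINISHED job status (job.FINISHED elsewhere in these examples)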
            job.update({'status':272, 'comment':'Transcode Completed'})

def main():
    parser = OptionParser(usage="usage: %prog [options] [jobid]")
Example #3
    def test_Dataheap_Recorded_002_01(self):
        """Test creation of a Recoreded and
           writing/reading to the 'recordedrating' table.
           UUT: class DBDataRef
           Caution: recn.update() does not delete a removed entry from the 'recordedrating' table !
           Only recn.rating.clean() removes all entries.
        """

        chanid = self.testenv['DOWNCHANID']
        starttimemyth = self.testenv['DOWNSTARTTIME']

        rec = Recorded((chanid, starttimemyth), db=self.mydb)

        # Recorded.rating is a list of lists of tuples
        # [[(u'system', u'ABCD'), (u'rating', '08.15')], [(u'system', u'WXYZ'), (u'rating', u'0.11')]]

        # add ratings to the recorded instance:
        rec.rating.add(u'ABCD', u'41.98')
        rec.rating.add(u'WXYZ', u'0.11')

        # check the ratings:
        #print(rec.rating)
        s0_found = s1_found = False
        r0_found = r1_found = False
        for (s, r) in rec.rating:
            # print(s)
            # print(r)
            if s == u'ABCD':
                s0_found = True
            if s == u'WXYZ':
                s1_found = True
            if r == u'41.98':
                r0_found = True
            if r == u'0.11':
                r1_found = True
        self.assertTrue(s0_found)
        self.assertTrue(s1_found)
        self.assertTrue(r0_found)
        self.assertTrue(r1_found)

        # revert last changes:
        rec.rating.revert()
        # check for an empty list:
        #print(rec.rating)
        self.assertEqual(len(rec.rating), 0)

        # add ratings again:
        rec.rating.add('ABCD', '41.98')
        rec.rating.add('QWERTZ', 'blah')
        rec.rating.add('WXYZ', '0.11')
        # commit these updates:
        rec.update()

        # get the recorded data again:
        recn = Recorded((chanid, starttimemyth), db=self.mydb)
        # edit existing rating data:
        match = None
        for i, (s, r) in enumerate(recn.rating):
            if s == 'ABCD':
                match = i
                break
        if match is not None:
            recn.rating[match]['rating'] = u'08.15'
        # commit that change:
        recn.update()
        # check the changed value:
        #print(rec.rating)
        rn_found = False
        for (s, r) in recn.rating:
            if r == u'08.15':
                rn_found = True
        self.assertTrue(rn_found)

        # delete a rating:
        recn.rating.delete(u'WXYZ', u'0.11')
        recn.update()
        #print(recn.rating)
        sn_found = False
        for (s, r) in recn.rating:
            if s == u'WXYZ':
                sn_found = True
        self.assertFalse(sn_found)

        # clean all ratings for this recorded instance:
        recn.rating.clean()
        recn.update()
        self.assertEqual(len(recn.rating), 0)
Example #4
def runjob(jobid=None, chanid=None, starttime=None, tzoffset=None):
    global estimateBitrate
    db = MythDB()

    if jobid:
        job = Job(jobid, db=db)
        chanid = job.chanid
        utcstarttime = job.starttime
    else:
        job = None
        #utcstarttime = datetime.strptime(starttime, "%Y%m%d%H%M%S%z")
        utcstarttime = parse(starttime)
        utcstarttime = utcstarttime + timedelta(hours=tzoffset)

    if debug:
        print('chanid "%s"' % chanid)
        print('utcstarttime "%s"' % utcstarttime)

    rec = Recorded((chanid, utcstarttime), db=db)
    utcstarttime = rec.starttime
    starttime_datetime = utcstarttime
   
    # reformat 'starttime' for use with mythtranscode/ffmpeg/mythcommflag
    starttime = str(utcstarttime.utcisoformat().replace(':', '').replace(' ', '').replace('T', '').replace('-', ''))
    if debug:
        print('mythtv format starttime "%s"' % starttime)
    input_filesize = rec.filesize
    
    if rec.commflagged:
        if debug:
            print('Recording has been scanned to detect commercial breaks.')
        waititer = 1
        keepWaiting = True
        while keepWaiting:
            keepWaiting = False
            for index,jobitem in reversed(list(enumerate(db.searchJobs(chanid=chanid, starttime=starttime_datetime)))):
                if jobitem.type == jobitem.COMMFLAG:  # Commercial flagging job
                    if debug:
                        print('Commercial flagging job detected with status %s' % jobitem.status)
                    if jobitem.status == jobitem.RUNNING: # status = RUNNING?
                        job.update({'status':job.PAUSED, 
                                    'comment':'Waited %d secs for the commercial flagging job' % (waititer*POLL_INTERVAL) \
                                     + ' currently running on this recording to complete.'})
                        if debug:
                            print('Waited %d secs for the commercial flagging job' % (waititer*POLL_INTERVAL) \
                                  + ' currently running on this recording to complete.')
                        time.sleep(POLL_INTERVAL)
                        keepWaiting = True
                        waititer += 1
                        break
    else:
        if debug:
            print('Recording has not been scanned to detect/remove commercial breaks.')
        if require_commflagged:
            if jobid:
                job.update({'status':job.RUNNING, 'comment':'Required commercial flagging for this file is not found. '
                            + 'Flagging commercials and cancelling any queued commercial flagging.'})
            # cancel any queued job to flag commercials for this recording and run commercial flagging in this script
            for index,jobitem in reversed(list(enumerate(db.searchJobs(chanid=chanid,starttime=starttime_datetime)))):
                if debug:
                    if index==0:
                        print(list(jobitem.keys()))
                    print(index,jobitem.id,jobitem.chanid)

                if jobitem.type == jobitem.COMMFLAG:  # Commercial flagging job
                    if jobitem.status == jobitem.RUNNING: # status = RUNNING?
                        jobitem.cmds = jobitem.STOP # stop command from the frontend to stop the commercial flagging job
                    #jobitem.setStatus(jobitem.CANCELLED)
                    #jobitem.setComment('Cancelled: Transcode command ran commercial flagging for this recording.')
                    jobitem.update({'status':jobitem.CANCELLED, 
                                    'comment':'A user transcode job ran commercial flagging for'
                                    + ' this recording and cancelled this job.'})
            if debug:
                print('Flagging Commercials...')
            # Call "mythcommflag --chanid $CHANID --starttime $STARTTIME"
            task = System(path='mythcommflag', db=db)
            try:
                output = task('--chanid "%s"' % chanid,
                              '--starttime "%s"' % starttime,
                              '2> /dev/null')
            except MythError as e:
                # it seems mythcommflag always exits with a decoding error "eno: Unknown error 541478725 (541478725)"
                pass
                #print 'Command failed with output:\n%s' % e.stderr
                #if jobid:
                #    job.update({'status':304, 'comment':'Flagging commercials failed'})
                #sys.exit(e.retcode)


    sg = findfile(rec.basename, rec.storagegroup, db=db)
    if sg is None:
        print('Local access to recording not found.')
        sys.exit(1)

    infile = os.path.join(sg.dirname, rec.basename)
    tmpfile = '%s.tmp' % infile.rsplit('.',1)[0]
 #   tmpfile = infile
    outfile = '%s.mp4' % infile.rsplit('.',1)[0]
    if debug:
        print('tmpfile "%s"' % tmpfile)


    clipped_bytes = 0
    # If selected, create a cutlist to remove commercials via mythtranscode by running:
    # mythutil --gencutlist --chanid $CHANID --starttime $STARTTIME
    if generate_commcutlist:
        if jobid:
            job.update({'status':job.RUNNING, 'comment':'Generating Cutlist for commercial removal'})
        task = System(path='mythutil', db=db)
        try:
            output = task('--gencutlist',
                          '--chanid "%s"' % chanid,
                          '--starttime "%s"' % starttime)
#                          '--loglevel debug',
#                          '2> /dev/null')
        except MythError as e:
            print('Command "mythutil --gencutlist" failed with output:\n%s' % e.stderr)
            if jobid:
                job.update({'status':job.ERRORED, 'comment':'Generation of commercial Cutlist failed'})
            sys.exit(e.retcode)

    # Lossless transcode to strip cutlist
    if generate_commcutlist or rec.cutlist==1:
        if jobid:
            job.update({'status':job.RUNNING, 'comment':'Removing Cutlist'})
        task = System(path='mythtranscode', db=db)
        try:
            output = task('--chanid "%s"' % chanid,
                          '--starttime "%s"' % starttime,
                          '--mpeg2',
                          '--honorcutlist',
                          '-o "%s"' % tmpfile,
                          '1>&2')
#                          '2> /dev/null')
            clipped_filesize = os.path.getsize(tmpfile)
            clipped_bytes = input_filesize - clipped_filesize
            clipped_compress_pct = float(clipped_bytes)/input_filesize 
            rec.commflagged = 0
        except MythError as e:
            print('Command "mythtranscode --honorcutlist" failed with output:\n%s' % e.stderr)
            if jobid:
                job.update({'status':job.ERRORED, 'comment':'Removing Cutlist failed. Copying file instead.'})
#            sys.exit(e.retcode)
            copyfile('%s' % infile, '%s' % tmpfile)
            clipped_filesize = input_filesize
            clipped_bytes = 0
            clipped_compress_pct = 0
            pass
    else:
        if jobid:
            job.update({'status':job.RUNNING, 'comment':'Creating temporary file for transcoding.'})
        copyfile('%s' % infile, '%s' % tmpfile)
        clipped_filesize = input_filesize
        clipped_bytes = 0
        clipped_compress_pct = 0

    duration_secs = 0
    # default isHD so the audio-parameter selection below still works when bitrate estimation is skipped
    isHD = False
    # Estimate bitrate, and detect duration and number of frames
    if estimateBitrate:
        if jobid:
            job.update({'status':job.RUNNING, 'comment':'Estimating bitrate; detecting frames per second, and resolution.'})

        duration_secs, e = get_duration(db, rec, transcoder, tmpfile)
        if duration_secs>0:
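            # average bitrate in kbit/s: size in bytes * 8 bits per byte / (1024 * duration in seconds)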
            bitrate = int(clipped_filesize*8/(1024*duration_secs))
        else:
            print('Estimating bitrate failed; falling back to constant rate factor encoding.\n')
            estimateBitrate = False
            duration_secs = 0
        print(e.stderr.decode('utf-8'))
        # get framerate of mpeg2 video stream and detect if stream is HD
        r = re.compile('mpeg2video (.*?) fps,')
        m = r.search(e.stderr.decode('utf-8'))
        strval = m.group(1)
        if debug:
            print(strval)
        isHD = False
        if "1920x1080" in strval or "1280x720" in strval or "2560x1440" in strval:
            if debug:
                print('Stream is HD')
            isHD = True
        else:
            if debug:
                print('Stream is not HD')
        framerate = float(m.group(1).split(' ')[-1])
        if debug:
            print('Framerate %s' % framerate)

    # Setup transcode video bitrate and quality parameters
    # if estimateBitrate is true and the input content is HD:
    #     encode 'medium' preset and vbitrate = inputfile_bitrate*compressionRatio
    # else:
    #     encode at user default preset and constant rate factor ('slow' and 20) 
    preset = preset_nonHD
    if estimateBitrate:
        if isHD:
            h264_bitrate = int(bitrate*compressionRatio)
            # HD coding with specified target bitrate (CRB encoding)
            if hdvideo_tgt_bitrate > 0 and h264_bitrate > hdvideo_tgt_bitrate:
                h264_bitrate = hdvideo_tgt_bitrate
                vbitrate_param = '-b:v %dk' % h264_bitrate
            else:   # HD coding with disabled or acceptable target bitrate (CRF encoding)
                vbitrate_param = '-crf:v %s' % crf
            preset = preset_HD
        else: # non-HD encoding (CRF encoding)
            vbitrate_param = '-crf:v %s' % crf            
    else:
        vbitrate_param = '-crf:v %s' % crf
    if hdvideo_min_bitrate > 0:
        vbitrate_param = vbitrate_param + ' -minrate %sk' % hdvideo_min_bitrate
    if hdvideo_max_bitrate > 0:
        vbitrate_param = vbitrate_param + ' -maxrate %sk' % hdvideo_max_bitrate
    if hdvideo_max_bitrate > 0 or hdvideo_min_bitrate > 0:
        vbitrate_param = vbitrate_param + ' -bufsize %sk' % device_bufsize

    if debug:
        print('Video bitrate parameter "%s"' % vbitrate_param)
        print('Video h264 preset parameter "%s"' % preset)

    # Setup transcode audio bitrate and quality parameters
    # Right now, the setup is as follows:
    # if input is HD: 
    #    copy audio streams to output, i.e., input=output audio
    # else:
    #    output is libfdk_aac encoded at 128kbps 
    if isHD:
        abitrate_param = abitrate_param_HD  # preserve 5.1 audio
    else:
        abitrate_param = abitrate_param_nonHD
    if debug:
        print('Audio bitrate parameter "%s"' % abitrate_param)

    # Transcode to mp4
#    if jobid:
#        job.update({'status':4, 'comment':'Transcoding to mp4'})

    # ffmpeg output is redirected to the temporary file tmpstatusfile and
    # a second thread continuously reads this file while
    # the transcode is in-process. see while loop below for the monitoring thread
    tf = tempfile.NamedTemporaryFile()
    tmpstatusfile = tf.name
#    tmpstatusfile = '/tmp/ffmpeg-transcode.txt'
    if debug:
        print('Using temporary file "%s" for ffmpeg status updates.' % tmpstatusfile)
    res = []
    # create a thread to perform the encode
    ipq = queue.Queue()
    t = threading.Thread(target=wrapper, args=(encode, 
                        (jobid, db, job, ipq, preset, vbitrate_param, abitrate_param,
                         tmpfile, outfile, tmpstatusfile,), res))
    t.start()
    # wait for ffmpeg to open the file and emit its initialization information 
    # before we start the monitoring process
    time.sleep(1) 
    # open the temporary file having the ffmpeg output text and process it to generate status updates
    hangiter = 0
    with open(tmpstatusfile) as f:
        # read all the opening ffmpeg status/analysis lines
        lines = f.readlines()
        # set initial progress to -1
        prev_progress=-1
        framenum=0
        fps=1.0
        while t.is_alive():
            # read all output since last readline() call
            lines = f.readlines()
            if len(lines) > 0:
                # every ffmpeg output status line ends with a carriage return '\r'
                # split the last read line at these locations
                lines=lines[-1].split('\r')
#                if debug:
#                    print lines;
                hangiter=0
                if len(lines) > 1 and lines[-2].startswith('frame'):
                    # since typical reads will have the last line ending with \r the last status
                    # message is at index=[-2] start processing this line
                    # replace multiple spaces with one space
                    lines[-2] = re.sub(' +',' ',lines[-2])
                    # remove any spaces after equals signs
                    lines[-2] = re.sub('= +','=',lines[-2])
                    # split the fields at the spaces the first two fields for typical
                    # status lines will be framenum=XXXX and fps=YYYY parse the values
                    values = lines[-2].split(' ')
                    if len(values) > 1:
                        if debug:
                            print('values %s' % values)
                        prev_framenum = framenum
                        prev_fps = fps
                        try:
                            # framenum = current frame number being encoded
                            framenum = int(values[0].split('=')[1])
                            # fps = frames per second for the encoder
                            fps = float(values[1].split('=')[1])
                        except ValueError as e:
                            print('ffmpeg status parse exception: "%s"' % e)
                            framenum = prev_framenum
                            fps = prev_fps
                            pass
                    # progress = 0-100 represent percent complete for the transcode
                    progress = int((100*framenum)/(duration_secs*framerate))
                    # eta_secs = estimated number of seconds until transcoding is complete
                    eta_secs = int((float(duration_secs*framerate)-framenum)/fps)
                    # pct_realtime = how many real seconds it takes to encode 1 second of video
                    pct_realtime = float(fps/framerate) 
                    if debug:
                        print('framenum = %d fps = %.2f' % (framenum, fps))                
                    if progress != prev_progress:
                        if debug:
                            print('Progress %d%% encoding %.1f frames per second ETA %d mins' \
                                  % ( progress, fps, float(eta_secs)/60))
                        if jobid:
                            progress_str = 'Transcoding to mp4 %d%% complete ETA %d mins fps=%.1f.' \
                                  % ( progress, float(eta_secs)/60, fps)
                            job.update({'status':job.RUNNING, 'comment': progress_str})
                        prev_progress = progress
                elif len(lines) > 1:
                    if debug:
                        print('Read pathological output %s' % lines[-2])
            else:
                if debug:
                    print('Read no lines of ffmpeg output for %s secs. Possible hang?' % (POLL_INTERVAL*hangiter))
                hangiter = hangiter + 1
                if jobid:
                    progress_str = 'Read no lines of ffmpeg output for %s secs. Possible hang?' % (POLL_INTERVAL*hangiter)
                    job.update({'status':job.RUNNING, 'comment': progress_str})
            time.sleep(POLL_INTERVAL)
        if debug:
            print('res = "%s"' % res)

    t.join(1)
    try:
        if ipq.get_nowait() == CleanExit:
            sys.exit()
    except queue.Empty:
        pass

    if flush_commskip:
        task = System(path='mythutil')
        task.command('--chanid %s' % chanid,
                     '--starttime %s' % starttime,
                     '--clearcutlist',
                     '2> /dev/null')
        task = System(path='mythutil')
        task.command('--chanid %s' % chanid,
                     '--starttime %s' % starttime,
                     '--clearskiplist',
                     '2> /dev/null')

    if flush_commskip:
        for index,mark in reversed(list(enumerate(rec.markup))):
            if mark.type in (rec.markup.MARK_COMM_START, rec.markup.MARK_COMM_END):
                del rec.markup[index]
        rec.bookmark = 0
        rec.cutlist = 0
        rec.markup.commit()

#    tf.close();
#    os.remove(tmpstatusfile);
    rec.basename = os.path.basename(outfile)
    rec.filesize = os.path.getsize(outfile)
#    rec.commflagged = 0
    rec.transcoded = 1
    rec.seek.clean()
    rec.update()

    os.remove(infile)
    # Cleanup the old *.png files
    for filename in glob('%s*.png' % infile):
        os.remove(filename)
    os.remove(tmpfile)
    try:
        os.remove('%s.map' % tmpfile)
    except OSError:
        pass

    output_filesize = rec.filesize
    output_bitrate = 0
    if duration_secs > 0:
        output_bitrate = int(output_filesize*8/(1024*duration_secs)) # kbps
    actual_compression_ratio = 1 - float(output_filesize)/clipped_filesize
    compressed_pct = 1 - float(output_filesize)/input_filesize

    if build_seektable:
        if jobid:
            job.update({'status':job.RUNNING, 'comment':'Rebuilding seektable'})
        task = System(path='mythcommflag')
        task.command('--chanid %s' % chanid,
                     '--starttime %s' % starttime,
                     '--rebuild',
                     '2> /dev/null')

    # fix duration in the recorded markup table; this will be off if commercials are removed
    duration_msecs, e = get_duration(db, rec, transcoder, outfile)
    duration_msecs = 1000*duration_msecs
    for index,mark in reversed(list(enumerate(rec.markup))):
        # find the duration markup entry and correct any error in the video duration that might be there
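        # type 33 is assumed to correspond to MARK_DURATION_MS in the recordedmarkup table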
        if mark.type == 33:
            if debug:
                print('Markup Duration in milliseconds "%s"' % mark.data)
            error = mark.data - duration_msecs
            if error != 0:
                if debug:
                    print('Markup Duration error is "%s"msecs' % error)
                mark.data = duration_msecs
                #rec.bookmark = 0
                #rec.cutlist = 0
                rec.markup.commit()

    if jobid:
        if output_bitrate:
            job.update({'status':job.FINISHED, 'comment':'Transcode Completed @ %dkbps, compressed file by %d%% (clipped %d%%, transcoder compressed %d%%)' % (output_bitrate,int(compressed_pct*100),int(clipped_compress_pct*100),int(actual_compression_ratio*100))})
        else:
            job.update({'status':job.FINISHED, 'comment':'Transcode Completed'})
Example #5
        rec = Recorded((chanid, starttime), db=db)

    if not os.path.isfile(outfile):
        if jobid:
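            # status 304 corresponds to the ERRORED job status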
            job.update({'status': 304, 'comment': 'Transcoded file not found'})
        print('Error: Transcoded file (%s) not found!' % outfile)
        sys.exit(2)

    print('Updating recording in MythTV DB, set transcoded')
    if jobid:
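        # status 4 corresponds to the RUNNING job status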
        job.update({'status': 4, 'comment': 'Updating database'})
    rec.basename = os.path.basename(outfile)
    rec.filesize = os.path.getsize(outfile)
    rec.transcoded = 1
    rec.seek.clean()
    rec.update()

    if flush_commskip:
        print('Flushing commskip list')
        if jobid:
            job.update({'status': 4, 'comment': 'Flushing commskip'})
        for index, mark in reversed(list(enumerate(rec.markup))):
            if mark.type in (rec.markup.MARK_COMM_START,
                             rec.markup.MARK_COMM_END):
                del rec.markup[index]
        rec.bookmark = 0
        rec.cutlist = 0
        rec.markup.commit()
        rec.update()

    if build_seektable: