Example #1
def link_all():
    db = MythDB()
    channels = get_channels(db)
    if opts.live:
        recs = db.searchRecorded(livetv=True)
    else:
        recs = db.searchRecorded()
    targets = {}
    for rec in recs:
        details = get_link_details(rec, channels)
        if details is None:
            continue
        source, dest, destfile = details
        if dest not in targets:
            targets[dest] = {}
        targets[dest][destfile] = source

    for (path, dirs, files) in os.walk(opts.dest, topdown=False):
        dir = path.split("/")[-1]
        if dir == "":
            continue
        if dir not in targets:
            for fname in files:
                if not os.path.islink(path + "/" + fname):
                    raise Exception("Found non link - " + path + "/" + fname)
                os.unlink(path + "/" + fname)
            os.rmdir(path)
            continue
        else:
            for fname in files:
                print dir, fname
                if dir not in targets or fname not in targets[dir]:
                    if not os.path.islink(path + "/" + fname):
                        raise Exception("Found non link - " + path + "/" +
                                        fname)
                    os.unlink(path + "/" + fname)
                else:
                    del targets[dir][fname]
                    print targets[dir]
                    if len(targets[dir]) == 0:
                        del targets[dir]
    for dir in targets:
        if not os.path.exists(opts.dest + dir):
            os.mkdir(opts.dest + dir)
        for fname in targets[dir]:
            os.symlink(targets[dir][fname],
                       opts.dest + "/" + dir + "/" + fname)
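
For context, the helper get_link_details() is not shown in this example; judging from how it is used above, it returns None for recordings that should be skipped, otherwise a (source, dest, destfile) triple. A purely hypothetical sketch consistent with that usage (the storage path and naming scheme are assumptions, not part of the original script):

def get_link_details(rec, channels):
    # Hypothetical helper, not part of the original example.
    # 'channels' is accepted only to match the call signature used above.
    if rec.basename is None:
        return None
    source = os.path.join('/var/lib/mythtv/recordings', rec.basename)  # assumed storage directory
    dest = rec.title.replace('/', '-')   # one destination sub-directory per title
    destfile = rec.basename              # keep the recording's own file name
    return source, dest, destfile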
Example #2
def link_all():
    # removing old content
    for path,dirs,files in os.walk(opts.dest, topdown=False):
        for fname in files:
            tmppath = os.path.join(path, fname)
            if not os.path.islink(tmppath):
                raise Exception('Non-link file found in destination path.')
            os.unlink(tmppath)
        os.rmdir(path)

    db = MythDB()
    if opts.live:
        recs = db.searchRecorded(livetv=True)
    else:
        recs = db.searchRecorded()
    for rec in recs:
        gen_link(rec)
Example #3
def link_all():
    db = MythDB()
    channels = get_channels(db)
    if opts.live:
        recs = db.searchRecorded(livetv=True)
    else:
        recs = db.searchRecorded()
    targets = {}
    for rec in recs:
        details = get_link_details(rec, channels)
        if details is None:
            continue
        source, dest, destfile = details
        if dest not in targets:
            targets[dest] = {}
        targets[dest][destfile] = source

    for (path, dirs, files) in os.walk(opts.dest, topdown=False):
        dir = path.split("/")[-1]
        if dir == "":
            continue
        if dir not in targets:
            for fname in files:
                if not os.path.islink(path + "/" + fname):
                    raise Exception("Found non link - " + path + "/" + fname)
                os.unlink(path + "/" + fname)
            os.rmdir(path)
            continue
        else:
            for fname in files:
                print dir, fname
                if dir not in targets or fname not in targets[dir]:
                    if not os.path.islink(path + "/" + fname):
                        raise Exception("Found non link - " + path + "/" + fname)
                    os.unlink(path + "/" + fname)
                else:
                    del targets[dir][fname]
                    print targets[dir]
                    if len(targets[dir]) == 0:
                        del targets[dir]
    for dir in targets:
        if not os.path.exists(opts.dest + dir):
            os.mkdir(opts.dest + dir)
        for fname in targets[dir]:
            os.symlink(targets[dir][fname], opts.dest + "/" + dir + "/" + fname)
Example #4
def findrecs(title):
    db = MythDB()
    if title is None:
        print 'title is the only supported query right now...'
        return None
    recs = db.searchRecorded(title=title)
    tobedone = []
    for rec in recs:
        #print 'Checking rec:'
        #print rec
        if rec.basename.endswith('mpg'):
            tobedone.append(rec)
        else:
            print 'Skipping non mpg: %s' % rec.basename
    return tobedone 
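
A minimal, hypothetical way to call findrecs(); 'Some Show' is only a placeholder title:

if __name__ == '__main__':
    matches = findrecs('Some Show')
    if matches:
        for rec in matches:
            print 'Would process: %s' % rec.basename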
Example #5
#!/usr/bin/env python

from MythTV import MythDB
import sys
out = "/var/lib/mythtv/recordings/recordings.txt"

if __name__ == "__main__":
	try:
		db = MythDB()
	except:
		print("Unable to connect to the MythTV database, aborting.")
		sys.exit(1)

	try:
		mylist = list()
		recs = db.searchRecorded()
		for recording in recs:
			try:
				y = recording.getProgram()
				_when = y["starttime"]
				a = "{Title}###---###{description}###---###{filename}###---###{filesize}###---###{starttime}".format(
						Title=y["title"],
						description=y["description"],
						filename = y["filename"],
						filesize = y["filesize"],
						starttime = _when
					)
				add_this = (_when.timestamp(), a)
				mylist.append(add_this)
			except:
				print("Skipping", recording)
Example #6
        help="""Specify that LiveTV recordings are to be linked as well.  Default is to
                only process links for scheduled recordings.""")
parser.add_option("--format", action="store", dest="format",
        help="""Specify the output format to be used to generate link paths.""")
parser.add_option("--underscores", action="store", dest="underscores",
        help="""Replace whitespace in filenames with underscore characters.""")
parser.add_option('-v', '--verbose', action='store', type='string', dest='verbose',
        help='Verbosity level')

opts, args = parser.parse_args()

if opts.dest is None:
    opts.dest = '/mnt/nfs/mythtv/plex/'
if opts.format is None:
    opts.format = '%T/%T - %pY%-%pm%-%pd %ph:%pi:%ps - %S (%c)'
if opts.jobid:
    db = MythDB()
    job = Job(opts.jobid, db=db)
    rec = Recorded((job.chanid, job.starttime), db=db)
    gen_link(rec)
elif opts.chanid and opts.starttime:
    rec = Recorded((opts.chanid, opts.starttime))
    gen_link(rec)
elif opts.filename:
    db = MythDB()
    rec = next(db.searchRecorded(basename=opts.filename))
    gen_link(rec)
else:
    link_all()

Example #7
class test_Dataheap_Recorded_001(unittest.TestCase):
    """Test class 'Recorded' from 'dataheap'.
       This test uses hardcoded values from the file '.testenv'.
    """

    @classmethod
    def setUpClass(cls):
        # get the global test environment
        global TestEnv
        cls.testenv = TestEnv

    def setUp(self):
        with add_log_flags():
            self.mydb = MythDB()
            self.mybe = MythBE(db=self.mydb)

    def tearDown(self):
        if os.path.exists('/tmp/my_logfile'):
            os.remove('/tmp/my_logfile')

    def test_Dataheap_Recorded_001_01(self):
        """Test property 'artwork' in class 'Recorded' from 'dataheap'.
        """
        chanid        = self.testenv['RECCHANID']
        starttimeutc  = self.testenv['RECSTARTTIMEUTC']
        starttimemyth = self.testenv['RECSTARTTIMEMYTH']
        title         = self.testenv['RECTITLE']
        basename      = self.testenv['RECBASENAME']
        recordedid    = self.testenv['RECRECORDID']
        inetref       = self.testenv['RECINETREF']

        rec = Recorded((chanid, starttimemyth), db = self.mydb)
        artwork = rec.artwork
        self.assertTrue(isinstance(artwork, RecordedArtwork))
        self.assertTrue(isinstance(artwork.coverart, Artwork))

    def test_Dataheap_Recorded_001_02(self):
        """Test method 'getProgram()' in class 'Recorded' from 'dataheap'.
        """
        chanid        = self.testenv['RECCHANID']
        starttimeutc  = self.testenv['RECSTARTTIMEUTC']
        starttimemyth = self.testenv['RECSTARTTIMEMYTH']
        title         = self.testenv['RECTITLE']
        basename      = self.testenv['RECBASENAME']
        recordedid    = self.testenv['RECRECORDID']
        inetref       = self.testenv['RECINETREF']

        rec = Recorded((chanid, starttimemyth), db = self.mydb)
        prgrm = rec.getProgram()
        self.assertTrue(isinstance(prgrm, Program))
        self.assertEqual(RECSTATUS.rsRecorded, prgrm.rsRecorded)


    def test_Dataheap_Recorded_001_03(self):
        """Test method 'getRecordedProgram()' in class 'Recorded' from 'dataheap'.
        """
        chanid        = self.testenv['RECCHANID']
        starttimeutc  = self.testenv['RECSTARTTIMEUTC']
        starttimemyth = self.testenv['RECSTARTTIMEMYTH']
        title         = self.testenv['RECTITLE']
        basename      = self.testenv['RECBASENAME']
        recordedid    = self.testenv['RECRECORDID']
        inetref       = self.testenv['RECINETREF']

        rec = Recorded((chanid, starttimemyth), db = self.mydb)
        prgrm = rec.getRecordedProgram()
        self.assertEqual(prgrm.chanid, int(chanid))


    def test_Dataheap_Recorded_001_04(self):
        """Test method 'formatPath()' in class 'Recorded' from 'dataheap'.
        """
        chanid        = self.testenv['RECCHANID']
        starttimeutc  = self.testenv['RECSTARTTIMEUTC']
        starttimemyth = self.testenv['RECSTARTTIMEMYTH']
        title         = self.testenv['RECTITLE']
        basename      = self.testenv['RECBASENAME']
        recordedid    = self.testenv['RECRECORDID']
        inetref       = self.testenv['RECINETREF']

        rec = Recorded((chanid, starttimemyth), db = self.mydb)
        formatpath = rec.formatPath("%U/%T/%pY-%pm-%pd %pH.%pi %T")
        ###print(rec.formatPath("%U/%T/%pY-%pm-%pd %pH.%pi %T"))
        self.assertTrue(title in formatpath)


    def test_Dataheap_Recorded_001_05(self):
        """Test method 'exportMetadata()' in class 'Recorded' from 'dataheap'.
        """
        chanid        = self.testenv['RECCHANID']
        starttimeutc  = self.testenv['RECSTARTTIMEUTC']
        starttimemyth = self.testenv['RECSTARTTIMEMYTH']
        title         = self.testenv['RECTITLE']
        basename      = self.testenv['RECBASENAME']
        recordedid    = self.testenv['RECRECORDID']
        inetref       = self.testenv['RECINETREF']

        rec = Recorded((chanid, starttimemyth), db = self.mydb)
        metadata = rec.exportMetadata()
        self.assertTrue(isinstance(metadata, VideoMetadata))
        self.assertEqual(metadata.inetref, inetref)


    def test_Dataheap_Recorded_001_06(self):
        """Test method 'Recorded.importMetadata()' and 'Recorded.update()'
           in class 'Recorded' from 'dataheap'.
           Test Case:
           - get a recording
           - save the 'stars' value of the recording for later use
           - save the dictdata of the recording for later use
           - export the metadata to xml and save for later use
           - check xml metadata structure for the 'stars' i.e. 'userrating' value
           - change the 'stars' value and save it for later use
           - update (save to database) the recording with the new 'stars' value
           - get the recording again to a new instance
           - check the updated 'stars' value
           - import the saved metadata back to the recording
           - check the reverted 'stars' value
           - check that the dictionary from the new Recorded instance is compatible with the original one
           - update Recorded.stars to the original value
           - check for the correct value of stars in the final instance of Recorded
        """
        chanid        = self.testenv['RECCHANID']
        starttimeutc  = self.testenv['RECSTARTTIMEUTC']
        starttimemyth = self.testenv['RECSTARTTIMEMYTH']
        title         = self.testenv['RECTITLE']
        basename      = self.testenv['RECBASENAME']
        recordedid    = self.testenv['RECRECORDID']
        inetref       = self.testenv['RECINETREF']
        stars         = self.testenv['RECSTARS']

        # Update database in case of any errors from previous test runs
        reczero= Recorded((chanid, starttimemyth), db = self.mydb)
        reczero.stars = stars
        reczero.update()

        rec = Recorded((chanid, starttimemyth), db = self.mydb)
        # save the 'stars' value i.e. 'userrating'
        recstars = rec.stars
        self.assertEqual("%.1f" %recstars, stars)
        # Recorded._origdata holds the dictionary pulled from database
        recdict = {}
        for key, value in rec._origdata.items():
            if isinstance(value, datetime):
                recdict[key] = value.mythformat()
            else:
                recdict[key] = value
        # export the metadata to xml and save for later use
        recmd = rec.exportMetadata()
        recmdxml = recmd.toXML()
        # check xml metadata structure for the 'stars' i.e. 'userrating' value
        # see https://www.mythtv.org/wiki/MythTV_Universal_Metadata_Format
        tree = recmdxml.getroottree()
        ### pprint(tree)    # lxml stuff
        recmdxml_stars = next(tree.iter('userrating')).text
        self.assertEqual("%.1f" %float(recmdxml_stars), stars)
        # change the 'stars' value and save it for later use
        rec.stars += 0.1
        recstars_updated = rec.stars
        # update (save to database) the recording with the new 'stars' value
        rec.update()
        # get the recording again to a new instance
        recnew = Recorded((chanid, starttimemyth), db = self.mydb)
        # check the updated 'stars' value
        self.assertEqual(recnew.stars, recstars_updated)
        # import the saved metadata back to the recording
        # Note: Recorded.importMetadata() makes an implicit Recorded.update()
        recnew.importMetadata(recmd, overwrite=True)
        # check the reverted 'stars' value
        self.assertEqual("%.1f" %recnew.stars, stars)
        # check that the dictionary from the new Recorded instance is compatible with the original one:
        for key, value in recdict.items():
            if isinstance(recnew._origdata[key], datetime):
                # don't act on 'lastmodified' entry, because we changed the rec in between:
                if key != 'lastmodified':
                    self.assertEqual(recdict[key], recnew._origdata[key].mythformat())

        self.assertEqual(len(recdict), len(recnew._origdata))
        # update Recorded.stars to the original value
        recnew.stars = recstars
        recnew.update()
        # check for the correct value of stars in the final instance of Recorded
        reclast = Recorded((chanid, starttimemyth), db = self.mydb)
        self.assertEqual("%.1f" %reclast.stars, stars)


    def test_Dataheap_Recorded_001_07(self):
        """Test methods 'db.searchRecorded and Recorded.getRecordedFile()'
           in class 'Recorded' from 'dataheap'.
        """
        chanid        = self.testenv['RECCHANID']
        starttimeutc  = self.testenv['RECSTARTTIMEUTC']
        starttimemyth = self.testenv['RECSTARTTIMEMYTH']
        title         = self.testenv['RECTITLE']
        basename      = self.testenv['RECBASENAME']
        recordedid    = self.testenv['RECRECORDID']
        inetref       = self.testenv['RECINETREF']
        recordedid    = self.testenv['RECRECORDEDID']
        fps           = self.testenv['RECFPS']

        reciter = self.mydb.searchRecorded(recordedid = recordedid)
        rec     = next(reciter)
        recfile = rec.getRecordedFile()

        # test '__repr__' and '__str__'
        print()
        print(repr(recfile))
        print(str(recfile))
        print(repr(recfile.values()))
        print(str(recfile.values()))
        print(repr(recfile.keys()))
        print(str(recfile.keys()))
        print(repr(recfile.fps))
        print(str(recfile.fps))

        self.assertEqual("%.1f" %recfile.fps, fps)
Example #8
class populate( object ):
    __metaclass__ = Singleton
    def __init__(self, host=None):
        self.db = MythDB()
        self.db.searchRecorded.handler = Recorded
        self.be = MythBE(db=self.db)
        self.log = MythLog(db=self.db)

        self.set_host(host)
        self.load_backends()
        self.load_storagegroups()

    def set_host(self, host):
        self.host = host
        if host:
            # if the host was defined on the command line, check
            # to make sure such host is defined in the database
            with self.db as c:
                c.execute("""SELECT count(1) FROM settings
                             WHERE hostname=? AND value=?""",
                            (host, 'BackendServerIP'))
                if c.fetchone()[0] == 0:
                    raise Exception('Invalid hostname specified for backend.')

    def load_backends(self):
        with self.db as c:
            c.execute("""SELECT hostname FROM settings
                         WHERE value='BackendServerIP'""")
            hosts = [r[0] for r in c.fetchall()]
        self.hosts = []
        for host in hosts:
            # try to access all defined hosts, and
            # store the ones currently accessible
            try:
                MythBE(backend=host)
                self.hosts.append(host)
            except:
                pass

    def load_storagegroups(self):
        self.storagegroups = \
            [sg for sg in self.db.getStorageGroup() \
                if sg.groupname not in ('Videos','Banners','Coverart',\
                                        'Fanart','Screenshots','Trailers')]

    def flush(self):
        self.misplaced = []
        self.zerorecs = []
        self.pendrecs = []
        self.orphrecs = []
        self.orphvids = []
        self.orphimgs = []
        self.dbbackup = []
        self.unfiltered = []

    def __call__(self):
        self.refresh_content()
        return self

    def refresh_content(self):
        # scan through all accessible backends to
        # generate a new list of orphaned content
        self.flush()

        unfiltered = {}
        for host in self.hosts:
            for sg in self.storagegroups:
                try:
                    dirs,files,sizes = self.be.getSGList(host, sg.groupname, sg.dirname)
                    for f,s in zip(files, sizes):
                        newfile = File(host, sg.groupname, sg.dirname, f, s, self.db)
                        # each filename should be unique among all storage directories
                        # defined on all backends, but may exist in the same directory
                        # on multiple backends if they are shared
                        if newfile not in unfiltered:
                            # add a new file to the list
                            unfiltered[str(newfile)] = newfile
                        else:
                            # add a reference to the host on which it was found
                            unfiltered[str(newfile)].add_host(host)
                except:
                    self.log(MythLog.GENERAL, MythLog.INFO, 
                            'Could not access {0.groupname}@{1}{0.dirname}'.format(sg, host))

        for rec in self.db.searchRecorded(livetv=True):
            if rec.hostname not in self.hosts:
                # recording is on an offline backend, ignore it
                name = rec.basename.rsplit('.',1)[0]
                for n in unfiltered.keys():
                    if name in n:
                        # and anything related to it
                        del unfiltered[n]
            elif rec.basename in unfiltered:
                # run through list of recordings, matching basenames
                # with found files, and removing file from list
                f = unfiltered[rec.basename]
                del unfiltered[rec.basename]
                if f.size < 1024:
                    # file is too small to be of any worth
                    self.zerorecs.append(rec)
                elif rec.doubleorphan:
                    # file is marked for deletion, but has been forgotten by the backend
                    self.pendrecs.append(rec)
                elif rec.hostname not in f.hosts:
                    # recording is in the database, but not where it should be
                    self.misplaced.append(rec)

                name = rec.basename.rsplit('.',1)[0]
                for f in unfiltered.keys():
                    if name in f:
                        # file is related to a valid recording, ignore it
                        del unfiltered[f]
            else:
                # recording has been orphaned
                self.orphrecs.append(rec)

        for n,f in unfiltered.iteritems():
            if n.endswith('.mpg') or n.endswith('.nuv'):
                # filter files with recording extensions
                self.orphvids.append(f)
            elif n.endswith('.png'):
                # filter files with image extensions
                self.orphimgs.append(f)
            elif 'sql' in n:
                # filter for database backups
                self.dbbackup.append(f)
            else:
                self.unfiltered.append(f)

    def print_results(self):
        printrecs("Recordings found on the wrong host", self.misplaced)
        printrecs("Recordings with missing files", self.orphrecs)
        printrecs("Zero byte recordings", self.zerorecs)
        printrecs("Forgotten pending deletions", self.pendrecs)
        printfiles("Orphaned video files", self.orphvids)
        printfiles("Orphaned snapshots", self.orphimgs)
        printfiles("Database backups", self.dbbackup)
        printfiles("Other files", self.unfiltered)
Example #9
class populate(object):
    __metaclass__ = Singleton

    def __init__(self, host=None):
        self.db = MythDB()
        self.db.searchRecorded.handler = Recorded
        self.be = MythBE(db=self.db)
        self.log = MythLog(db=self.db)

        self.set_host(host)
        self.load_backends()
        self.load_storagegroups()

    def set_host(self, host):
        self.host = host
        if host:
            # if the host was defined on the command line, check
            # to make sure such host is defined in the database
            with self.db as c:
                c.execute(
                    """SELECT count(1) FROM settings
                             WHERE hostname=? AND value=?""",
                    (host, 'BackendServerIP'))
                if c.fetchone()[0] == 0:
                    raise Exception('Invalid hostname specified for backend.')

    def load_backends(self):
        with self.db as c:
            c.execute("""SELECT hostname FROM settings
                         WHERE value='BackendServerIP'""")
            hosts = [r[0] for r in c.fetchall()]
        self.hosts = []
        for host in hosts:
            # try to access all defined hosts, and
            # store the ones currently accessible
            try:
                MythBE(backend=host)
                self.hosts.append(host)
            except:
                pass

    def load_storagegroups(self):
        self.storagegroups = \
            [sg for sg in self.db.getStorageGroup() \
                if sg.groupname not in ('Videos','Banners','Coverart',\
                                        'Fanart','Screenshots','Trailers')]

    def flush(self):
        self.misplaced = []
        self.zerorecs = []
        self.pendrecs = []
        self.orphrecs = []
        self.orphvids = []
        self.orphimgs = []
        self.dbbackup = []
        self.unfiltered = []

    def __call__(self):
        self.refresh_content()
        return self

    def refresh_content(self):
        # scan through all accessible backends to
        # generate a new list of orphaned content
        self.flush()

        unfiltered = {}
        for host in self.hosts:
            for sg in self.storagegroups:
                try:
                    dirs, files, sizes = self.be.getSGList(
                        host, sg.groupname, sg.dirname)
                    for f, s in zip(files, sizes):
                        newfile = File(host, sg.groupname, sg.dirname, f, s,
                                       self.db)
                        # each filename should be unique among all storage directories
                        # defined on all backends, but may exist in the same directory
                        # on multiple backends if they are shared
                        if newfile not in unfiltered:
                            # add a new file to the list
                            unfiltered[str(newfile)] = newfile
                        else:
                            # add a reference to the host on which it was found
                            unfiltered[str(newfile)].add_host(host)
                except:
                    self.log(
                        MythLog.GENERAL, MythLog.INFO,
                        'Could not access {0.groupname}@{1}{0.dirname}'.format(
                            sg, host))

        for rec in self.db.searchRecorded(livetv=True):
            if rec.hostname not in self.hosts:
                # recording is on an offline backend, ignore it
                name = rec.basename.rsplit('.', 1)[0]
                for n in unfiltered.keys():
                    if name in n:
                        # and anything related to it
                        del unfiltered[n]
            elif rec.basename in unfiltered:
                # run through list of recordings, matching basenames
                # with found files, and removing file from list
                f = unfiltered[rec.basename]
                del unfiltered[rec.basename]
                if f.size < 1024:
                    # file is too small to be of any worth
                    self.zerorecs.append(rec)
                elif rec.doubleorphan:
                    # file is marked for deletion, but has been forgotten by the backend
                    self.pendrecs.append(rec)
                elif rec.hostname not in f.hosts:
                    # recording is in the database, but not where it should be
                    self.misplaced.append(rec)

                name = rec.basename.rsplit('.', 1)[0]
                for f in unfiltered.keys():
                    if name in f:
                        # file is related to a valid recording, ignore it
                        del unfiltered[f]
            else:
                # recording has been orphaned
                self.orphrecs.append(rec)

        for n, f in unfiltered.iteritems():
            if n.endswith('.mpg') or n.endswith('.nuv'):
                # filter files with recording extensions
                self.orphvids.append(f)
            elif n.endswith('.png'):
                # filter files with image extensions
                self.orphimgs.append(f)
            elif 'sql' in n:
                # filter for database backups
                self.dbbackup.append(f)
            else:
                self.unfiltered.append(f)

    def print_results(self):
        printrecs("Recordings found on the wrong host", self.misplaced)
        printrecs("Recordings with missing files", self.orphrecs)
        printrecs("Zero byte recordings", self.zerorecs)
        printrecs("Forgotten pending deletions", self.pendrecs)
        printfiles("Orphaned video files", self.orphvids)
        printfiles("Orphaned snapshots", self.orphimgs)
        printfiles("Database backups", self.dbbackup)
        printfiles("Other files", self.unfiltered)
Example #10
class test_Dataheap_Recorded_002(unittest.TestCase):
    """Test creation of a Recoreded and
       writing/reading to the 'recordedrating' table.
       This test uses hardcoded values from the file '.testenv'.
    """
    @classmethod
    def setUpClass(cls):
        # get the global test environment
        global TestEnv
        cls.testenv = TestEnv

    def setUp(self):
        with add_log_flags():
            self.mydb = MythDB()

    def tearDown(self):
        if os.path.exists('/tmp/my_logfile'):
            os.remove('/tmp/my_logfile')

    def test_Dataheap_Recorded_002_01(self):
        """Test creation of a Recoreded and
           writing/reading to the 'recordedrating' table.
           UUT: class DBDataRef
           Caution: recn.update() does not delete a removed entry from the 'recordedrating' table !
           Only recn.rating.clean() removes all entries.
        """

        chanid = self.testenv['DOWNCHANID']
        starttimemyth = self.testenv['DOWNSTARTTIME']

        rec = Recorded((chanid, starttimemyth), db=self.mydb)

        # Recorded.rating is a list of lists of tuples
        # [[(u'system', u'ABCD'), (u'rating', '08.15')], [(u'system', u'WXYZ'), (u'rating', u'0.11')]]

        # add ratings to the recorded instance:
        rec.rating.add(u'ABCD', u'41.98')
        rec.rating.add(u'WXYZ', u'0.11')

        # check the ratings:
        #print(rec.rating)
        s0_found = s1_found = False
        r0_found = r1_found = False
        for (s, r) in rec.rating:
            # print(s)
            # print(r)
            if s == u'ABCD':
                s0_found = True
            if s == u'WXYZ':
                s1_found = True
            if r == u'41.98':
                r0_found = True
            if r == u'0.11':
                r1_found = True
        self.assertTrue(s0_found)
        self.assertTrue(s1_found)
        self.assertTrue(r0_found)
        self.assertTrue(r1_found)

        # revert last changes:
        rec.rating.revert()
        # check for an empty list:
        #print(rec.rating)
        self.assertEqual(len(rec.rating), 0)

        # add ratings again:
        rec.rating.add('ABCD', '41.98')
        rec.rating.add('QWERTZ', 'blah')
        rec.rating.add('WXYZ', '0.11')
        # commit these updates:
        rec.update()

        # get the recorded data again:
        recn = Recorded((chanid, starttimemyth), db=self.mydb)
        # edit the existing 'ABCD' rating entry (if present):
        idx = None
        for i, (s, r) in enumerate(recn.rating):
            if s == 'ABCD':
                idx = i
                break
        if idx is not None:
            recn.rating[idx]['rating'] = u'08.15'
        # commit that change:
        recn.update()
        # check the changed value:
        #print(rec.rating)
        rn_found = False
        for (s, r) in recn.rating:
            if r == u'08.15':
                rn_found = True
        self.assertTrue(rn_found)

        # delete a rating:
        recn.rating.delete(u'WXYZ', u'0.11')
        recn.update()
        #print(recn.rating)
        sn_found = False
        for (s, r) in recn.rating:
            if s == u'WXYZ':
                sn_found = True
        self.assertFalse(sn_found)

        # clean all ratings for this recorded instance:
        recn.rating.clean()
        recn.update()
        self.assertEqual(len(recn.rating), 0)

    def test_Dataheap_Recorded_002_02(self):
        """Test creation of a Recoreded and
           writing/reading to the 'recordedcredits' table.
           it tests the entries of the 'people' table as well.
           UUT: class DBDataCRef
        """
        """
        $ python - --nodblog --loglevel debug --verbose all --logfile /tmp/my_logfile
        Python 2.7.15+ (default, Oct  7 2019, 17:39:04)
        [GCC 7.4.0] on linux2
        Type "help", "copyright", "credits" or "license" for more information.
        >>> from MythTV import MythDB, Video, Recorded
        >>> d = MythDB()
        _initlogger call
        _parseinput call
        >>> rec =d.searchRecorded(title = 'Die letzte Metro')
        >>> r = next(rec)
        >>> r
        b'<Recorded 'Die letzte Metro','2014-10-16 22:16:00+02:00' at 0x7f96bde242a0>'
        >>> r.cast
        []
        >>> r.cast._refdat
        [11301L, datetime(2014, 10, 16, 20, 18, 21)]
        >>> r.cast._datfields
        [u'name', u'role']

        >>> r.cast.add('Catherine Deneuve', 'actor')
        >>> r.cast
        [[(u'name', 'Catherine Deneuve'), (u'role', 'actor')]]
        >>> r.cast.add(u"Gérard Depardieu", 'actor')
        >>> r.cast.add(u"Andréa Ferréol", 'actor')
        >>> r.cast.add(u"François Truffaut", 'director')
        >>> r.cast
        [[(u'name', 'Catherine Deneuve'), (u'role', 'actor')], [(u'name', u'G\xe9rard Depardieu'), (u'role', 'actor')], [(u'name', u'Andr\xe9a Ferr\xe9ol'), (u'role', 'actor')], [(u'name', u'Fran\xe7ois Truffaut'), (u'role', 'director')]]
        >>> r.update()

        >>> print(r.cast[1]['name'])
        Gérard Depardieu


        >>> r.cast.add(u"Jean Poiret", 'actor')
        >>> r.cast.add(u"Jean-Louis Richard", 'actor')
        >>> r.cast
        [[(u'name', 'Catherine Deneuve'), (u'role', 'actor')], [(u'name', u'G\xe9rard Depardieu'), (u'role', 'actor')], [(u'name', u'Andr\xe9a Ferr\xe9ol'), (u'role', 'actor')], [(u'name', u'Fran\xe7ois Truffaut'), (u'role', 'director')], [(u'name', u'Jean Poiret'), (u'role', 'actor')], [(u'name', u'Jean-Louis Richard'), (u'role', 'actor')]]
        >>> r.update()


        >>> r1 = Recorded((r.chanid, r.starttime), db =d)
        >>> r1
        b'<Recorded 'Die letzte Metro','2014-10-16 22:16:00+02:00' at 0x7f96bde2d868>'
        >>> r1.cast
        [[(u'name', u'Catherine Deneuve'), (u'role', u'actor')], [(u'name', u'G\xe9rard Depardieu'), (u'role', u'actor')], [(u'name', u'Andr\xe9a Ferr\xe9ol'), (u'role', u'actor')], [(u'name', u'Fran\xe7ois Truffaut'), (u'role', u'director')], [(u'name', u'Jean Poiret'), (u'role', u'actor')], [(u'name', u'Jean-Louis Richard'), (u'role', u'actor')]]
        >>> r1.cast.delete(u'Jean-Louis Richard', u'actor')
        >>> r1.cast
        [[(u'name', u'Catherine Deneuve'), (u'role', u'actor')], [(u'name', u'G\xe9rard Depardieu'), (u'role', u'actor')], [(u'name', u'Andr\xe9a Ferr\xe9ol'), (u'role', u'actor')], [(u'name', u'Fran\xe7ois Truffaut'), (u'role', u'director')], [(u'name', u'Jean Poiret'), (u'role', u'actor')]]
        >>> r1.update()
        >>> r1.cast
        [[(u'name', u'Catherine Deneuve'), (u'role', u'actor')], [(u'name', u'G\xe9rard Depardieu'), (u'role', u'actor')], [(u'name', u'Andr\xe9a Ferr\xe9ol'), (u'role', u'actor')], [(u'name', u'Fran\xe7ois Truffaut'), (u'role', u'director')], [(u'name', u'Jean Poiret'), (u'role', u'actor')]]


        Attention: Recorded.cast.delete() deletes the entries in the 'people' table as well !!

        >>> r1.cast.delete(u"Jean Poiret", 'actor')
        >>> r1.update()
        >>> r1.cast.delete(u"François Truffaut", 'director')
        >>> r1.update()
        >>> r1.cast
        [[(u'name', u'Catherine Deneuve'), (u'role', u'actor')], [(u'name', u'G\xe9rard Depardieu'), (u'role', u'actor')], [(u'name', u'Andr\xe9a Ferr\xe9ol'), (u'role', u'actor')]]


        """
        class People(DBData):
            """
            People(data=None, db=None) --> People object to
            database table 'people', data is a `name` string.

            - get information about the table:
              $ mysql -h <master-backend-ip> -u mythtv -p<password-from-config.xml> mythconverg

              MariaDB [mythconverg]> describe people;
                +-------------+-----------------------+------+-----+---------+----------------+
                | Field       | Type                  | Null | Key | Default | Extra          |
                +-------------+-----------------------+------+-----+---------+----------------+
                | person      | mediumint(8) unsigned | NO   | PRI | NULL    | auto_increment |
                | name        | varchar(128)          | NO   | UNI |         |                |
                +-------------+-----------------------+------+-----+---------+----------------+
                2 rows in set (0.00 sec)

            """
            _table = 'people'
            _key = ['name']

            ### end class People

        # a recording with french accents in the cast

        title = self.testenv['RECFRTITLE']  # "Le Dernier Métro"
        chanid = self.testenv['RECFRCHANID']
        starttimemyth = self.testenv['RECFRSTARTTIMEMYTH']

        print(title)

        castlist = [(u'Catherine Deneuve', u'actor'),
                    (u"Gérard Depardieu", u'actor'),
                    (u"Andréa Ferréol", u'actor'),
                    (u"François Truffaut", u'director')]

        # get a recording, search for the title
        recs = self.mydb.searchRecorded(title=title)
        rec = next(recs)
        self.assertEqual(rec.chanid, int(chanid))

        # backup the cast of this recording
        org_cast = rec.cast

        #         ## backup the people table
        #         #org_people = People(db=self.mydb)

        #         # check if entries in castlist does not occur in cast
        #         for name,role in castlist:
        #             print(name)
        #             print(role)

        # #            if  in rec.cast:
        # #                rec.cast.delete(*c)      # need to dereference the tuple
        #         sys.exit(1)
        #         rec.update()
        #         # remember length
        #         cast_length = len(rec.cast)
        #         # check if the members of the cast are listed
        #         # in the 'people' table
        #         cast_found = False
        #         for c in castlist:
        #             try:
        #                 cname = People(c[0])
        #                 cast_found = True
        #             except:
        #                 pass
        #         # cast should not be listed in the people table
        #         self.assertFalse(cast_found)

        # add castlist to cast
        for c in castlist:
            rec.cast.add(*c)  # need to dereference the tuple
        print(rec.cast)
        #sys.exit(1)
        rec.update()

        # check again if the members of the cast are listed
        # in the 'people' table
        cast_found = False
        for c in castlist:
            try:
                cname = People(c[0])
                cast_found = True
            except:
                pass
        # now cast should be listed in the people table
        self.assertTrue(cast_found)

        # get the len of the rec.casts
        c1_length = len(rec.cast)
        # delete one entry
        rec.cast.delete(*castlist[2])
        rec.update()
        self.assertEqual(c1_length - 1, len(rec.cast))

        # delete all entries
        rec.cast.clean()  # this does a commit as well
        self.assertEqual(len(rec.cast), 0)

        # add the previously saved cast back
        # to a new instance of that recording
        recn = Recorded((rec.chanid, rec.starttime), db=self.mydb)
        for cs in org_cast:
            recn.cast.add(cs)
        recn.update()

        self.assertEqual(len(recn.cast), len(org_cast))
Example #11
class test_Dataheap_Recorded_003(unittest.TestCase):
    """Test creation of a Recoreded and
       writing/reading to the 'recordedrating' table.
       This test uses hardcoded values from the file '.testenv'.
    """
    @classmethod
    def setUpClass(cls):
        # get the global test environment
        global TestEnv
        cls.testenv = TestEnv

    def setUp(self):
        with add_log_flags():
            self.mydb = MythDB()

    def tearDown(self):
        if os.path.exists('/tmp/my_logfile'):
            os.remove('/tmp/my_logfile')

    def test_Dataheap_Recorded_003_01(self):
        """Test creation of a Recoreded and
           writing/reading to the 'recordedcredits' table.
           it tests the entries of the 'people' table as well.
           UUT: class DBDataCRef
        """
        class People(DBDataWrite):
            """
            People(data=None, db=None) --> People object to
            database table 'people', data is a `name` string.

            - get information about the table:
              $ mysql -h <master-backend-ip> -u mythtv -p<password-from-config.xml> mythconverg

              MariaDB [mythconverg]> describe people;
                +-------------+-----------------------+------+-----+---------+----------------+
                | Field       | Type                  | Null | Key | Default | Extra          |
                +-------------+-----------------------+------+-----+---------+----------------+
                | person      | mediumint(8) unsigned | NO   | PRI | NULL    | auto_increment |
                | name        | varchar(128)          | NO   | UNI |         |                |
                +-------------+-----------------------+------+-----+---------+----------------+
                2 rows in set (0.00 sec)

            """
            _table = 'people'
            _key = ['name']

            _defaults = {u'name': ''}

            ### end class People

        # a recording with french accents in the cast

        title = self.testenv[
            'RECFRTITLE']  # "Le Dernier Métro", "Die letzte Metro"
        chanid = self.testenv['RECFRCHANID']
        starttimemyth = self.testenv['RECFRSTARTTIMEMYTH']

        #print(title)

        castlist = [(u'Catherine Deneuve', u'actor'),
                    (u"Gérard Depardieu", u'actor'),
                    (u"Andréa Ferréol", u'actor'), (u"Jean Poiret", 'actor'),
                    (u"François Truffaut", u'director')]

        # ensure, that "Jean Poiret" is already in the 'people' table
        try:
            p = People(u"Jean Poiret", db=self.mydb)
        except:
            p = People(db=self.mydb).create({'name': u"Jean Poiret"})
            p.update()

        # get a recording, search for the title
        recs = self.mydb.searchRecorded(title=title)
        rec = next(recs)
        self.assertEqual(rec.chanid, int(chanid))

        # backup the cast of this recording
        org_cast = rec.cast

        # add castlist to cast
        for c in castlist:
            rec.cast.add(*c)  # need to de-reference the tuple
        print(rec.cast)
        #sys.exit(1)
        rec.update()

        # check again if the members of the cast are listed
        # in the 'people' table
        cast_found = False
        for c in castlist:
            try:
                cname = People(c[0])
                cast_found = True
            except:
                pass
        # now cast should be listed in the people table
        self.assertTrue(cast_found)

        # get the len of the rec.casts
        c1_length = len(rec.cast)
        # delete one entry
        rec.cast.delete(*castlist[2])
        rec.update()
        self.assertEqual(c1_length - 1, len(rec.cast))

        # delete all entries
        rec.cast.clean()  # this does a commit as well
        self.assertEqual(len(rec.cast), 0)

        # add the previously saved cast back
        # to a new instance of that recording
        recn = Recorded((rec.chanid, rec.starttime), db=self.mydb)
        for cs in org_cast:
            recn.cast.add(cs)
        recn.update()

        self.assertEqual(len(recn.cast), len(org_cast))

        p.delete()
        p.update()
Example #12
        help="""Specify that LiveTV recordings are to be linked as well.  Default is to
                only process links for scheduled recordings.""")
parser.add_option("--format", action="store", dest="format",
        help="""Specify the output format to be used to generate link paths.""")
parser.add_option("--underscores", action="store", dest="underscores",
        help="""Replace whitespace in filenames with underscore characters.""")
parser.add_option('-v', '--verbose', action='store', type='string', dest='verbose',
        help='Verbosity level')

opts, args = parser.parse_args()

if opts.dest is None:
    opts.dest = '/mnt/mythtv/by-title'
if opts.format is None:
    opts.format = '%T/(%oY%-%om%-%od) %S'
if opts.jobid:
    db = MythDB()
    job = Job(opts.jobid, db=db)
    rec = Recorded((job.chanid, job.starttime), db=db)
    gen_link(rec)
elif opts.chanid and opts.starttime:
    rec = Recorded((opts.chanid, opts.starttime))
    gen_link(rec)
elif opts.filename:
    db = MythDB()
    rec = next(db.searchRecorded(basename=opts.filename))
    gen_link(rec)
else:
    link_all()

Example #13
def main():
	' setup logger, all to stdout and INFO and higher to LOGFILE '
	logging.basicConfig(format='%(message)s',
				level=logging.NOTSET)
	loggingfile = logging.handlers.RotatingFileHandler(LOGFILE, maxBytes=(MAXLOGSIZE), backupCount=MAXLOGS)
	loggingfile.setLevel(logging.INFO)
	formatter = logging.Formatter('%(asctime)s: %(message)s', datefmt='%m-%d %H:%M')
	loggingfile.setFormatter(formatter)
	logging.getLogger('').addHandler(loggingfile)

	' connect to mythtv database '
	db = MythDB()
	be = MythBE(db=db)

	' loop all files in lib_dir that are symlinks and create listing '
	listings = []
	for ld in LIBDIR:
		for dp, dn, files in os.walk(ld):
			for file in files:
				filepath = os.path.join(dp,file)
				if (os.path.islink(filepath)):
					listings.append(lib_listing(filepath))

	' get list of all recordings from MythDB, link with library, figure out their status '
	recordings = []
	activeRecordings = []
	activeJobs = []
	newExpireList = []
	mythrecordings = db.searchRecorded()
	recorderList = be.getRecorderList()
	for mrec in mythrecordings:
		logging.debug(mrec)
		rec = recording(mrec)
		recordings.append(rec)

		' skip items already set to autoexpire '
		if (rec.mythrec.autoexpire == 1):
			logging.debug(" - already set to expire, skip")
			continue

		' loop through the list of library items looking for matching recordings, linking them '
		for l in listings:
			if rec.match(l.symlink):
				if rec.lib_listing != None:
					logging.error("UH OH! Linking with something already linked!")
				else:
					rec.lib_listing = l

		' figure out if the recording is active '
		for recorder in recorderList:
			arec = be.getCurrentRecording(recorder)
			if (arec['title'] is not None) and (arec['chanid'] != 0) and \
			   (arec == rec.program):
				logging.debug(" - currently recording, skip")
				activeRecordings.append(rec)
				rec.state = RecordingState.Recording
				break
		if (rec.state != RecordingState.Recorded):
			continue

		' figure out if the recording is part of an active job '
		jobs = db.searchJobs() #need to generate jobs on each loop
		for job in jobs:
			if job.status in [Job.ABORTING, Job.ERRORING, Job.PAUSED, Job.PENDING, \
					  Job.QUEUED, Job.RETRY, Job.RUNNING, Job.STARTING, Job.STOPPING]:
				jobrec = Recorded((job.chanid, job.starttime), db=db)
				if rec.mythrec == jobrec:
					logging.debug(" - currently part of a job, skip")
					activeJobs.append(rec)
					rec.state = RecordingState.BusyJob
					break
		if (rec.state != RecordingState.Recorded):
			continue

		' potentially add to auto-expire list, and set orphaned recordings to auto-expire '
		if (rec.lib_listing == None) and (rec.state == RecordingState.Recorded):
			logging.debug(" - no link, auto-expire")
			newExpireList.append(rec)
			# rec.mythrec.delete(force=True, rerecord=False)
			rec.mythrec.autoexpire = 1
			rec.mythrec.update()

	' log summary '
	logging.info("")
	logging.info("** Summary **")
	logging.info(" [MythDB Recordings][%s]" % len(recordings))
	logging.info("  - active recordings: %s" % len(activeRecordings))
	for arec in activeRecordings:
		logging.info("   - %s [%s]" % (arec.program, arec.metadata.filename))
	logging.info("  - in progress jobs: %s" % len(activeJobs))
	for ajobs in activeJobs:
		logging.info("   - %s [%s]" % (ajobs.program, ajobs.metadata.filename))

	logging.info("")
	logging.info(" [Mythical Links][%s]" % len(listings))
	logging.info("  - new auto-expire items: %s" % len(newExpireList))
	for d in newExpireList:
		logging.info( "   - %s [%s]" % (d.program, d.metadata.filename))
Example #14
#!/usr/bin/env python
from MythTV import MythDB
from datetime import datetime, timedelta

MAXAGE=365 # number of days

db = MythDB()
recs = db.searchRecorded(custom=(('starttime<%s', datetime.now()-timedelta(MAXAGE)),))
if recs is None:
    print 'No old recordings to delete'
else:
    for rec in recs:
        if rec.subtitle:
            print 'Deleting "%s: %s - %s"' % (rec.endtime, rec.title, rec.subtitle)
        else:
            print 'Deleting %s: "%s"' % (rec.endtime, rec.title)
        rec.delete()
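
The same age filter could presumably also be expressed with the 'olderthan' keyword exercised by the searchRecorded tests below; a hedged sketch, assuming a myth-format timestamp (YYYYMMDDHHMMSS) is accepted as in those tests:

cutoff = int((datetime.now() - timedelta(MAXAGE)).strftime('%Y%m%d%H%M%S'))
old_recs = db.searchRecorded(olderthan=cutoff)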


Example #15
class test_Methodheap_MythDB_001(unittest.TestCase):
    """Test method 'searchRecorded' from MythTV.MythDB().
       This test uses hardcoded values from the file '.testenv'.
    """

    @classmethod
    def setUpClass(cls):
        # get the global test environment
        global TestEnv
        cls.testenv = TestEnv

    def setUp(self):
        self.mydb = MythDB()


    def test_Methodheap_MythDB_001_searchRecorded_01(self):
        """Test 'seachRecorded' method from MythTV.MythDB() using 'chanid'.
        """

        recs =  self.mydb.searchRecorded(chanid = self.testenv['RECCHANID'])
        rec01 = next(recs)
        self.assertTrue(isinstance(rec01, Recorded))


    def test_Methodheap_MythDB_001_searchRecorded_02(self):
        """Test 'seachRecorded' method from MythTV.MythDB() using 'chanid', 'title'.
        """

        recs =  self.mydb.searchRecorded( chanid = self.testenv['RECCHANID']
                                        , title  = self.testenv['RECTITLE']
                                        )
        rec01 = next(recs)
        self.assertTrue(isinstance(rec01, Recorded))
        # check if accessing a property works
        self.assertTrue(rec01.basename == self.testenv['RECBASENAME'])


    def test_Methodheap_MythDB_001_searchRecorded_03(self):
        """Test 'seachRecorded' method from MythTV.MythDB() using 'newerthan'.
        """

        # subtract 1 minute from starttime
        starttime_before = int(self.testenv['RECSTARTTIMEMYTH']) - 100
        recs =  self.mydb.searchRecorded( chanid = self.testenv['RECCHANID']
                                        , newerthan = starttime_before
                                        )
        rec01 = next(recs)
        self.assertTrue(isinstance(rec01, Recorded))
        # check if accessing a property works
        self.assertTrue(rec01.basename == self.testenv['RECBASENAME'])


    def test_Methodheap_MythDB_001_searchRecorded_04(self):
        """Test 'seachRecorded' method from MythTV.MythDB() using 'olderthan' and 'newerthan'.
        Time is given in 'mythtime' notation.
        """

        # subtract / add 1 minute from/to starttime
        starttime_before = int(self.testenv['RECSTARTTIMEMYTH']) - 100
        starttime_after  = int(self.testenv['RECSTARTTIMEMYTH']) + 100
        recs =  self.mydb.searchRecorded( chanid = self.testenv['RECCHANID']
                                        , olderthan = starttime_after
                                        , newerthan = starttime_before
                                        )
        rec01 = next(recs)
        self.assertTrue(isinstance(rec01, Recorded))
        # check if accessing a property works
        self.assertTrue(rec01.basename == self.testenv['RECBASENAME'])


    def test_Methodheap_MythDB_001_searchRecorded_05(self):
        """Test 'seachRecorded' method from MythTV.MythDB() using 'olderthan' and 'newerthan'.
           Time values are passed in UTC and ISO format.
        """

        # subtract / add 1 minute from/to starttime
        starttime_before = int(self.testenv['RECSTARTTIMEMYTH']) - 100
        starttime_after  = int(self.testenv['RECSTARTTIMEMYTH']) + 100

        # transform to utc iso, like '2019-03-05T12:50:00Z'
        starttime_before_utc_iso = datetime.duck(starttime_before).utcisoformat() +"Z"
        starttime_after_utc_iso  = datetime.duck(starttime_after).utcisoformat() +"Z"

        recs =  self.mydb.searchRecorded( chanid = self.testenv['RECCHANID']
                                        , olderthan = starttime_after_utc_iso
                                        , newerthan = starttime_before_utc_iso
                                        )
        rec01 = next(recs)

        self.assertTrue(isinstance(rec01, Recorded))
        # check if accessing a property works
        self.assertTrue(rec01.basename == self.testenv['RECBASENAME'])


    def test_Methodheap_MythDB_001_searchRecorded_06(self):
        """Test 'seachRecorded' method from MythTV.MythDB() using 'olderthan',
           'newerthan' and the 'closecaptioned' property.
           Time values are passed in UTC and ISO format.
        """

        # subtract / add 1 minute from/to starttime
        starttime_before = int(self.testenv['RECSTARTTIMEMYTH']) - 100
        starttime_after  = int(self.testenv['RECSTARTTIMEMYTH']) + 100

        # transform to utc iso, like '2019-03-05T12:50:00Z'
        starttime_before_utc_iso = datetime.duck(starttime_before).utcisoformat() +"Z"
        starttime_after_utc_iso  = datetime.duck(starttime_after).utcisoformat() +"Z"

        recs =  self.mydb.searchRecorded( chanid = self.testenv['RECCHANID']
                                        , olderthan = starttime_after_utc_iso
                                        , newerthan = starttime_before_utc_iso
                                        , closecaptioned = 0
                                        )
        rec01 = next(recs)

        self.assertTrue(isinstance(rec01, Recorded))
        # check if accessing a property works
        self.assertTrue(rec01.basename == self.testenv['RECBASENAME'])