def delete_old_files_2():
    """
    delete cache files/entries for files which don't exist anymore
    """
    print checking('deleting old web-server thumbnails'),
    sys.__stdout__.flush()
    num = 0
    for file in util.match_files_recursively(vfs.www_image_cachedir(), config.IMAGE_SUFFIX):
        if not vfs.isfile(file[len(vfs.www_image_cachedir()):file.rindex('.')]):
            os.unlink(file)
            num += 1
    print 'deleted %s file%s' % (num, num != 1 and 's' or '')

    print checking('deleting old cache files'),
    sys.__stdout__.flush()
    num = 0
    for file in util.match_files_recursively(config.OVERLAY_DIR, ['raw']):
        if file.startswith(os.path.join(config.OVERLAY_DIR, 'disc')):
            continue
        if not vfs.isfile(file[len(config.OVERLAY_DIR):-4]):
            os.unlink(file)
            num += 1
    print 'deleted %s file%s' % (num, num != 1 and 's' or '')

    print checking('deleting cache for directories not existing anymore'),
    subdirs = util.get_subdirs_recursively(config.OVERLAY_DIR)
    subdirs.reverse()
    for file in subdirs:
        if not os.path.isdir(file[len(config.OVERLAY_DIR):]) and not \
               file.startswith(os.path.join(config.OVERLAY_DIR, 'disc')):
            for metafile in ('cover.png', 'cover.png.raw', 'cover.jpg', 'cover.jpg.raw',
                             'mmpython.cache', 'freevo.cache'):
                if os.path.isfile(os.path.join(file, metafile)):
                    os.unlink(os.path.join(file, metafile))
            if not os.listdir(file):
                os.rmdir(file)
    print 'done'

    print checking('deleting old entries in meta-info'),
    sys.__stdout__.flush()
    for filename in util.recursefolders(config.OVERLAY_DIR, 1, 'freevo.cache', 1):
        if filename.startswith(os.path.join(config.OVERLAY_DIR, 'disc')):
            continue
        sinfo = os.stat(filename)
        if not sinfo[ST_SIZE]:
            # print '%s is empty' % filename
            continue
        dirname = os.path.dirname(filename)[len(config.OVERLAY_DIR):]
        data = util.read_pickle(filename)
        for key in copy.copy(data):
            if not os.path.exists(os.path.join(dirname, str(key))):
                del data[key]
        util.save_pickle(data, filename)
    print 'done'

def cache_thumbnails():
    """
    cache all image files by creating thumbnails
    """
    import cStringIO

    print checking('checking thumbnails'),
    sys.__stdout__.flush()

    files = []
    for d in config.VIDEO_ITEMS + config.AUDIO_ITEMS + config.IMAGE_ITEMS:
        try:
            d = d[1]
        except:
            pass
        if not os.path.isdir(d):
            continue
        files += util.match_files_recursively(d, config.IMAGE_SUFFIX) + \
                 util.match_files_recursively(vfs.getoverlay(d), config.IMAGE_SUFFIX)

    files = util.misc.unique(files)
    for filename in copy.copy(files):
        thumb = vfs.getoverlay(filename + '.raw')
        try:
            sinfo = os.stat(filename)
            if os.stat(thumb)[ST_MTIME] > sinfo[ST_MTIME]:
                files.remove(filename)
        except OSError:
            pass
        for bad_dir in ('.svn', '.xvpics', '.thumbnails', '.pics'):
            if filename.find(os.path.join(os.path.sep, bad_dir + '')) > 0:
                try:
                    files.remove(filename)
                except:
                    pass

    print '%s file%s' % (len(files), len(files) != 1 and 's' or '')
    for filename in files:
        fname = filename
        if len(fname) > 65:
            fname = fname[:20] + ' [...] ' + fname[-40:]
        print '  %4d/%-4d %s' % (files.index(filename) + 1, len(files), Unicode(fname))
        util.cache_image(filename)
    if files:
        print

def delete_old_files_1():
    """
    delete old files from previous versions of Freevo which are not needed anymore
    """
    # TODO: add WWW_LINK_CACHE and WWW_IMAGE_CACHE
    print checking('deleting old cache files from older freevo version'),
    sys.__stdout__.flush()

    del_list = []
    # for name in ('image-viewer-thumb.jpg', 'thumbnails', 'audio', 'mmpython', 'disc',
    #              'image_cache', 'link_cache'):
    for name in ('image-viewer-thumb.jpg', 'thumbnails', 'audio', 'mmpython', 'disc'):
        if os.path.exists(os.path.join(config.FREEVO_CACHEDIR, name)):
            del_list.append(os.path.join(config.FREEVO_CACHEDIR, name))

    del_list += util.recursefolders(config.OVERLAY_DIR, 1, 'mmpython', 1)
    del_list += util.match_files(os.path.join(config.OVERLAY_DIR, 'disc'), ['mmpython', 'freevo'])

    for file in util.match_files_recursively(config.OVERLAY_DIR, ['png']):
        if file.endswith('.fvt.png'):
            del_list.append(file)

    for f in del_list:
        if os.path.isdir(f):
            util.rmrf(f)
        else:
            os.unlink(f)

    print 'deleted %s file%s' % (len(del_list), len(del_list) != 1 and 's' or '')

def cache_video_thumbs(defaults):
    """
    create video thumbnails for the given directories or files
    """
    import util.videothumb

    print checking('creating video thumbnails'),
    print
    sys.__stdout__.flush()

    files = []
    for dirname in defaults['directory']:
        if defaults['dry_run']:
            print dirname
        if os.path.isdir(dirname):
            if defaults['recursive']:
                files += util.match_files_recursively(dirname, config.VIDEO_MPLAYER_SUFFIX)
            else:
                files += util.match_files(dirname, config.VIDEO_MPLAYER_SUFFIX)
        else:
            files += [os.path.abspath(dirname)]

    files = util.misc.unique(files)
    for filename in files[:]:
        print '  %4d/%-4d %s' % (files.index(filename) + 1, len(files),
                                 Unicode(os.path.basename(filename)))
        if defaults['dry_run']:
            continue
        util.videothumb.snapshot(filename, update=False)
    print

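# Illustrative sketch only, not part of the original helper: the shape of the
# 'defaults' dict that cache_video_thumbs() and the other cache helpers read
# from.  The keys are taken from how the code accesses them; the example path
# is made up.
_example_defaults = {
    'directory': ['/video/clips'],   # directories or single files to process
    'recursive': True,               # descend into subdirectories
    'dry_run':   False,              # if True, only print what would be done
    'rebuild':   False,              # force a rebuild (used by the cropdetect helper)
}
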
def disc_info(media, force=False):
    """
    return kaa metadata disc information for the media
    """
    discinfo = mmpython.parse(media.devicename)
    if not discinfo or not discinfo.id:
        # bad disc, e.g. blank disc
        return {}

    cachedir = os.path.join(config.OVERLAY_DIR, 'disc/metadata')
    cachefile = os.path.join(cachedir, discinfo.id + '.freevo')
    if os.path.isfile(cachefile):
        metainfo = util.read_pickle(cachefile)
    else:
        metainfo = {}

    if force or discinfo.mime == 'unknown/unknown' and not metainfo.has_key('disc_num_video'):
        media.mount()
        for type in ('video', 'audio', 'image'):
            items = getattr(config, '%s_SUFFIX' % type.upper())
            files = util.match_files_recursively(media.mountdir, items)
            metainfo['disc_num_%s' % type] = len(files)
        media.umount()
        util.save_pickle(metainfo, cachefile)

    info = Info(cachefile, discinfo, metainfo)
    info.disc = True
    return info

def cache_www_thumbnails(defaults):
    """
    cache all image files for web server by creating thumbnails
    """
    import cStringIO
    import stat

    print checking('checking web-server thumbnails'),
    print
    sys.__stdout__.flush()

    files = []
    for dirname in defaults['directory']:
        if defaults['dry_run']:
            print dirname
        if os.path.isdir(dirname):
            if defaults['recursive']:
                files += util.match_files_recursively(dirname, config.IMAGE_SUFFIX)
            else:
                files += util.match_files(dirname, config.IMAGE_SUFFIX)
        else:
            files += [os.path.abspath(dirname)]

    files = util.misc.unique(files)
    for filename in files[:]:
        thumb = util.www_image_path(filename, '.thumbs')
        try:
            sinfo = os.stat(filename)
            if os.stat(thumb)[ST_MTIME] > sinfo[ST_MTIME]:
                files.remove(filename)
        except OSError:
            pass
        for bad_dir in ('.svn', '.xvpics', '.thumbnails', '.pics'):
            if filename.find(os.path.join(os.path.sep, bad_dir + '')) > 0:
                try:
                    files.remove(filename)
                except:
                    pass

    print '%s file%s' % (len(files), len(files) != 1 and 's' or '')
    for filename in files:
        fname = filename
        if len(fname) > 65:
            fname = fname[:20] + ' [...] ' + fname[-40:]
        print '  %4d/%-4d %s' % (files.index(filename) + 1, len(files), Unicode(fname))
        if defaults['dry_run']:
            continue
        util.www_thumb_create(filename)
    if files:
        print

def fxdhandler(self, fxd, node):
    """
    parse image specific stuff from fxd files::

        <?xml version="1.0" ?>
        <freevo>
          <slideshow title="foo" random="1|0" repeat="1|0">
            <cover-img>foo.jpg</cover-img>
            <background-music random="1|0">
              <directory recursive="1|0">path</directory>
              <file>filename</file>
            </background-music>
            <files>
              <directory recursive="1|0" duration="10">path</directory>
              <file duration="0">filename</file>
            </files>
            <info>
              <description>A nice description</description>
            </info>
          </slideshow>
        </freevo>
    """
    items = []
    dirname = os.path.dirname(fxd.getattr(None, 'filename', ''))
    children = fxd.get_children(node, 'files')
    if children:
        children = children[0].children

    for child in children:
        try:
            citems = []
            fname = os.path.join(dirname, String(fxd.gettext(child)))
            if child.name == 'directory':
                if fxd.getattr(child, 'recursive', 0):
                    f = util.match_files_recursively(fname, self.suffix(), skip_password=True)
                else:
                    f = util.match_files(fname, self.suffix())
                citems = self.get(None, f)
            elif child.name == 'file':
                citems = self.get(None, [fname])

            duration = fxd.getattr(child, 'duration', 0)
            if duration:
                for i in citems:
                    i.duration = duration
            items += citems
        except OSError, e:
            print 'slideshow error:', e

def cache_www_thumbnails():
    """
    cache all image files for web server by creating thumbnails
    """
    import cStringIO
    import stat

    print 'checking webserver thumbnails.........................',
    sys.__stdout__.flush()

    files = []
    for d in config.IMAGE_ITEMS:
        try:
            d = d[1]
        except:
            pass
        if not os.path.isdir(d):
            continue
        files += util.match_files_recursively(d, config.IMAGE_SUFFIX)

    files = util.misc.unique(files)
    for filename in copy.copy(files):
        thumb = util.www_thumbnail_path(filename)
        try:
            sinfo = os.stat(filename)
            if os.stat(thumb)[stat.ST_MTIME] > sinfo[stat.ST_MTIME]:
                files.remove(filename)
        except OSError:
            pass
        for bad_dir in ('.svn', '.xvpics', '.thumbnails', '.pics'):
            if filename.find('/' + bad_dir + '/') > 0:
                try:
                    files.remove(filename)
                except:
                    pass

    print '%s file%s' % (len(files), len(files) != 1 and 's' or '')
    for filename in files:
        fname = filename
        if len(fname) > 65:
            fname = fname[:20] + ' [...] ' + fname[-40:]
        print '  %4d/%-4d %s' % (files.index(filename) + 1, len(files), fname)
        util.create_www_thumbnail(filename)
    if files:
        print

def queue_file(self, arg=None, menuw=None):
    """
    append the selected file or directory to the m3u playlist file
    """
    if not self.playlist_handle:
        self.playlist_handle = open('%s/%s.m3u' % (self.playlist_folder,
                                                   time.strftime(self.naming)), 'w+')

    for f in self.item.files.get():
        if os.path.isdir(f):
            for file in util.match_files_recursively(f, config.AUDIO_SUFFIX, skip_password=True):
                self.playlist_handle.write('%s\n' % os.path.join(f, file))
        else:
            self.playlist_handle.write('%s\n' % f)
    self.playlist_handle.flush()

    if menuw:
        if self.item.type == 'dir':
            menuw.delete_submenu(True, True, _('Queued Directory'))
        else:
            menuw.delete_submenu(True, True, _('Queued Track'))
    return

def disc_info(media, force=False):
    """
    return kaa metadata disc information for the media
    """
    type, id = mmpython.cdrom.status(media.devicename)
    if not id:
        # bad disc, e.g. blank disc
        return {}

    cachedir = os.path.join(config.OVERLAY_DIR, 'disc/metadata')
    cachefile = os.path.join(cachedir, id + '.mmpython')
    if os.path.isfile(cachefile) and not force:
        mmdata = util.read_pickle(cachefile)
    else:
        mmdata = mmpython.parse(media.devicename)
        if not mmdata:
            print '*****************************************'
            print 'Error detecting the disc in %r' % (media.devicename)
            print 'Please contact the developers'
            print '*****************************************'
            return {}
        else:
            util.save_pickle(mmdata, cachefile)

    cachefile = os.path.join(cachedir, id + '.freevo')
    if os.path.isfile(cachefile):
        metainfo = util.read_pickle(cachefile)
    else:
        metainfo = {}

    if mmdata.mime == 'unknown/unknown' and not metainfo.has_key('disc_num_video'):
        media.mount()
        for type in ('video', 'audio', 'image'):
            items = getattr(config, '%s_SUFFIX' % type.upper())
            files = util.match_files_recursively(media.mountdir, items)
            metainfo['disc_num_%s' % type] = len(files)
        media.umount()
        util.save_pickle(metainfo, cachefile)

    info = Info(cachefile, mmdata, metainfo)
    info.disc = True
    return info

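# Illustrative sketch only, not part of the original module: the shape of the
# 'metainfo' dict that disc_info() caches as '<disc id>.freevo'.  The counts are
# the number of files on the mounted disc matching each configured suffix list;
# the values here are made up.
_example_disc_metainfo = {
    'disc_num_video': 0,
    'disc_num_audio': 142,
    'disc_num_image': 0,
}
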
def queue_file(self, arg=None, menuw=None):
    if not self.playlist_handle:
        self.playlist_handle = open('%s/%s.m3u' % (self.playlist_folder,
                                                   time.strftime(self.naming)), 'w+')

    for f in self.item.files.get():
        if os.path.isdir(f):
            for file in util.match_files_recursively(f, config.AUDIO_SUFFIX):
                self.playlist_handle.write('%s\n' % os.path.join(f, file))
        else:
            self.playlist_handle.write('%s\n' % f)
    self.playlist_handle.flush()

    if menuw:
        if self.item.type == 'dir':
            menuw.delete_submenu(True, True, _('Queued Directory'))
        else:
            menuw.delete_submenu(True, True, _('Queued Track'))
    return

if opts.debug:
    config.DEBUG = opts.debug

if opts.trace:
    # activate a trace function
    tracefile = os.path.join(config.FREEVO_LOGDIR, 'trace.txt')
    print 'tracing to %r' % (tracefile,)
    tracefd = open(tracefile, 'w')
    sys.settrace(tracefunc)

if opts.doc:
    # create api doc for Freevo and move it to Docs/api
    import pydoc
    import re
    for file in util.match_files_recursively('src/', ['py']):
        # doesn't work for everything :-(
        if file not in ('src/tv/record_server.py',) and \
           file.find('src/www') == -1 and file.find('src/helpers') == -1:
            file = re.sub('/', '.', file)
            try:
                pydoc.writedoc(file[4:-3])
            except:
                pass
    try:
        os.mkdir('Docs/api')
    except:
        pass
    for file in util.match_files('.', ['html']):
        print 'moving %s' % file
        os.rename(file, 'Docs/api/%s' % file)

                line = line.replace(var, change_map[var])
            else:
                print 'changing config file line:'
                print line[:-1]
                print line[:-1].replace(var, change_map[var])
                print
        if out:
            out.write(line)
    if out:
        out.close()


if len(sys.argv) <= 3 and sys.argv[1] == '-s':
    print 'searching for files using old style variables'
    # s = ''
    # for var in change_map:
    #     s += '|%s' % var
    # s = '(%s)' % s[1:]
    # pipe = 'xargs egrep \'%s\' | grep -v helpers/convert_config' % s
    # os.system('find . -name \*.py | %s' % pipe)
    # os.system('find . -name \*.rpy | %s' % pipe)
    # print
    # print
    # print 'starting scanning all files in detail:'
    for f in util.match_files_recursively('src', ['py', 'rpy']):
        change(f, print_name=True)
    sys.exit(0)

change(sys.argv[1])

    _debug_('os.system(\'xsetroot -cursor %s\')' % config.OSD_X11_CURSORS)
    os.system('xsetroot -cursor %s' % config.OSD_X11_CURSORS)
    config.START_FULLSCREEN_X = 1

# activate a trace function
if sys.argv[1] == '--trace':
    tracefd = open(os.path.join(config.FREEVO_LOGDIR, 'trace.txt'), 'w')
    sys.settrace(tracefunc)
    config.DEBUG = 2

# create api doc for Freevo and move it to Docs/api
if sys.argv[1] == '--doc':
    import pydoc
    import re
    for file in util.match_files_recursively('src/', ['py']):
        # doesn't work for everything :-(
        if file not in ('src/tv/record_server.py',) and \
           file.find('src/www') == -1 and \
           file.find('src/helpers') == -1:
            file = re.sub('/', '.', file)
            try:
                pydoc.writedoc(file[4:-3])
            except:
                pass
    try:
        os.mkdir('Docs/api')
    except:
        pass
    for file in util.match_files('.', ['html',

    # Print the long imdb title for every movie.
    for movie in results:
        outp = u'%s\t: %s : %s' % (movie.movieID, fxd.imdb.get_imdbID(movie),
                                   movie['long imdb title'])
        print outp.encode(opts.encoding, 'replace')
    print '%s' % title.encode(opts.encoding)
    sys.exit(0)

if opts.bulk:
    imdb_id = args[0]
    directory = args[1]
    # scan the dir for all VIDEO items
    items = []
    for file in util.match_files_recursively(directory, config.VIDEO_SUFFIX):
        items.append((file, os.path.split(file)[0], os.path.split(file)[1]))
    # now, for each item bulk fetch imdb data, and then write fxd to disk
    for fxd in fxd.retrieveImdbBulkSeriesData(imdb_id, items):
        fxd.writeFxd()
    sys.exit(0)

if opts.tv:
    print "Searching IMDB for '%s' season:%s episode:%s..." % (opts.tv, opts.season, opts.episode)
    result = fxd.retrieveImdbData(opts.tv, opts.season, opts.episode)
    if len(results) == 0:
        print 'No results'
    title = 'http://www.imdb.com/title/tt%s/ %s' % (results, results)
    print '%s' % title.encode(opts.encoding)

                print line[:-1]
                print re.sub(repat, change_map[var], line)[:-1]
                print
        if out:
            out.write(line)
    if out:
        out.close()


if opts.scan:
    print "searching for files using old style variables..."
    # s = ''
    # for var in change_map:
    #     s += '|%s' % var
    # s = '(%s)' % s[1:]
    # pipe = 'xargs egrep \'%s\' | grep -v helpers/convert_config' % s
    # os.system('find . -name \*.py | %s' % pipe)
    # os.system('find . -name \*.rpy | %s' % pipe)
    # print
    # print
    # print 'starting scanning all files in detail:'
    import util
    for f in util.match_files_recursively("src", ["py", "rpy"]):
        change(f, print_name=True)
    sys.exit(0)

change(opts.file)

        for file in files:
            # print file.__dict__
            fxd.setattr(file, 'mplayer-options', '-vf crop=%s' % encjob.crop)

    plugin.register_callback('fxditem', ['video'], 'movie', fxdhandler.parse_movie)
    plugin.register_callback('fxditem', ['video'], 'disc-set', fxdhandler.parse_disc_set)

    print checking('checking cropdetect'),
    sys.__stdout__.flush()

    fxdfiles = []
    for dirname in defaults['directory']:
        if os.path.isdir(dirname):
            if defaults['recursive']:
                fxdfiles += util.match_files_recursively(dirname, fxditem.mimetype.suffix())
            else:
                fxdfiles += util.match_files(dirname, fxditem.mimetype.suffix())
        else:
            fxdfiles += [os.path.abspath(dirname)]

    fxdfiles.sort(lambda l, o: cmp(l.lower(), o.lower()))
    fxdfiles = util.misc.unique(fxdfiles)

    files = []
    for info in fxditem.mimetype.parse(None, fxdfiles, display_type='video'):
        if defaults['rebuild'] or (hasattr(info, 'mplayer_options') and not info.mplayer_options):
            files.append(info.files)

    for fileinfo in copy.copy(files):
        filename = fileinfo.fxd_file
        print

    sys.exit(1)


if __name__ == "__main__":
    os.umask(config.UMASK)

    if len(sys.argv) > 1 and sys.argv[1] == '--help':
        print_help()
        sys.exit(0)

    if len(sys.argv) > 1 and sys.argv[1] == '--thumbnail':
        import util.videothumb
        if len(sys.argv) > 2:
            if sys.argv[2] == '--recursive':
                if len(sys.argv) > 3:
                    dirname = os.path.abspath(sys.argv[3])
                    files = util.match_files_recursively(dirname, config.VIDEO_SUFFIX)
                else:
                    print_error_and_exit()
            elif os.path.isdir(sys.argv[2]):
                dirname = os.path.abspath(sys.argv[2])
                files = util.match_files(dirname, config.VIDEO_SUFFIX)
            else:
                files = [os.path.abspath(sys.argv[2])]

        print 'creating video thumbnails....'
        for filename in files:
            print '  %4d/%-4d %s' % (files.index(filename) + 1, len(files),
                                     os.path.basename(filename))
            util.videothumb.snapshot(filename, update=False)
        print

class Playlist(Item):

    def __init__(self, name='', playlist=None, parent=None, display_type=None,
                 random=False, build=False, autoplay=False, repeat=False):
        """
        Init the playlist

        1. a filename to a playlist file (e.g. m3u)
        2. a list of items to play, this list can include
           - Items
           - filenames
           - a list (directoryname, recursive=0|1)

        @param build: create the playlist, this means unfold the directories
        """
        Item.__init__(self, parent)
        self.type = 'playlist'
        self.menuw = None
        self.name = Unicode(name)

        if isstring(playlist) and not name:
            self.name = util.getname(playlist)

        # variables only for Playlist
        self.current_item = None
        self.playlist = playlist or []
        self.autoplay = autoplay
        self.repeat = repeat
        self.display_type = display_type

        self.__build__ = False
        self.suffixlist = []
        self.get_plugins = []
        self.background_playlist = None
        if build:
            self.build()
        if self.name.find('Playlist') < 0:
            self.name = '%s Playlist' % self.name
        self.random = random


    def read_m3u(self, plsname):
        """
        This is the (m3u) playlist reading function.

        @param plsname: The playlist filename
        @returns: The list of interesting lines in the playlist
        """
        try:
            lines = util.readfile(plsname)
        except IOError:
            print 'Cannot open file "%s"' % plsname
            return 0

        try:
            playlist_lines_dos = map(lambda l: l.strip(), lines)
            playlist_lines = filter(lambda l: len(l) > 0, playlist_lines_dos)
        except IndexError:
            print 'Bad m3u playlist file "%s"' % plsname
            return 0

        (curdir, playlistname) = os.path.split(plsname)
        # XXX this may not work if the curdir is not accessible
        os.chdir(curdir)

        for i in range(0, len(playlist_lines)):
            if playlist_lines[i][0] == "#":
                continue
            line = playlist_lines[i]
            line = line.replace('\\', '/')  # fix MSDOS slashes
            try:
                if line.find('://') > 0:
                    if playlist_lines[i-1].find('#EXTINF') > -1 and \
                       len(playlist_lines[i-1].split(",")) > 1:
                        self.playlist.append((line, playlist_lines[i-1].split(",")[1]))
                    else:
                        self.playlist.append(line)
                elif os.path.isabs(line):
                    if os.path.exists(line):
                        self.playlist.append(line)
                else:
                    if os.path.exists(os.path.abspath(os.path.join(curdir, line))):
                        self.playlist.append(os.path.abspath(os.path.join(curdir, line)))
            except TypeError:
                print 'Bad m3u playlist line in "%s":%r' % (plsname, line)


    def read_pls(self, plsname):
        """
        This is the (pls) playlist reading function.

        Arguments: plsname - the playlist filename
        Returns:   The list of interesting lines in the playlist
        """
        try:
            lines = util.readfile(plsname)
        except IOError:
            print String(_('Cannot open file "%s"')) % plsname
            return 0

        playlist_lines_dos = map(lambda l: l.strip(), lines)
        playlist_lines = filter(lambda l: l[0:4] == 'File', playlist_lines_dos)

        for line in playlist_lines:
            numchars = line.find("=") + 1
            if numchars > 0:
                playlist_lines[playlist_lines.index(line)] = line[numchars:]

        (curdir, playlistname) = os.path.split(plsname)
        os.chdir(curdir)

        for line in playlist_lines:
            if line.endswith('\r\n'):
                line = line.replace('\\', '/')  # fix MSDOS slashes

            if line.find('://') > 0:
                self.playlist.append(line)
            elif os.path.isabs(line):
                if os.path.exists(line):
                    self.playlist.append(line)
            else:
                if os.path.exists(os.path.abspath(os.path.join(curdir, line))):
                    self.playlist.append(os.path.abspath(os.path.join(curdir, line)))


    def read_ssr(self, ssrname):
        """
        This is the (ssr) slideshow reading function.

        File line format::

            FileName: "image file name"; Caption: "caption text"; Delay: "sec"

        The caption and delay are optional.
        @param ssrname: the slideshow filename
        @returns: the list of interesting lines in the slideshow
        """
        (curdir, playlistname) = os.path.split(ssrname)
        os.chdir(curdir)

        out_lines = []
        try:
            lines = util.readfile(ssrname)
        except IOError:
            print String(_('Cannot open file "%s"')) % ssrname
            return 0

        playlist_lines_dos = map(lambda l: l.strip(), lines)
        playlist_lines = filter(lambda l: l[0] != '#', lines)

        # Here's where we parse the line.  See the format above.
        for line in playlist_lines:
            tmp_list = []
            ss_name = re.findall('FileName: \"(.*?)\"', line, re.I)
            ss_caption = re.findall('Caption: \"(.*?)\"', line, re.I)
            ss_delay = re.findall('Delay: \"(.*?)\"', line, re.I)

            if ss_name != []:
                if ss_caption == []:
                    ss_caption += [""]
                if ss_delay == []:
                    ss_delay += [5]

                for p in self.get_plugins:
                    if os.path.isabs(ss_name[0]):
                        curdir = ss_name[0]
                    else:
                        curdir = os.path.abspath(os.path.join(curdir, ss_name[0]))
                    for i in p.get(self, [curdir]):
                        if i.type == 'image':
                            i.name = Unicode(ss_caption[0])
                            i.duration = int(ss_delay[0])
                            self.playlist.append(i)
                            break

        self.autoplay = True


    def build(self):
        """
        Build the playlist. Create a list of items and filenames. This function
        will load the playlist file or expand directories
        """
        if self.suffixlist:
            # we called this function before
            return

        playlist = self.playlist
        self.playlist = []

        for p in plugin.mimetype(self.display_type):
            # if self.display_type in p.display_type:
            # XXX self.display_type seems to be set to None
            # XXX which prevents the str->Item conversion from occurring
            # XXX This is a short-term fix I guess
            self.suffixlist += p.suffix()
            self.get_plugins.append(p)

        if isstring(playlist):
            # it's a filename with a playlist
            try:
                f = open(playlist, "r")
                line = f.readline()
                f.close()
                if line.find("[playlist]") > -1:
                    self.read_pls(playlist)
                elif line.find("[Slides]") > -1:
                    self.read_ssr(playlist)
                else:
                    self.read_m3u(playlist)
            except (OSError, IOError), e:
                print 'playlist error: %s' % e
            self.set_url(playlist)

        # self.playlist is a list of Items or strings (filenames)
        if not isstring(playlist):
            for i in playlist:
                if isinstance(i, Item):
                    # Item object, correct parent
                    i = copy.copy(i)
                    i.parent = self
                    self.playlist.append(i)
                elif (isinstance(i, list) or isinstance(i, tuple)) and \
                     len(i) == 2 and vfs.isdir(i[0]):
                    # (directory, recursive=True|False)
                    if i[1]:
                        self.playlist += util.match_files_recursively(
                            i[0], self.suffixlist, skip_password=True,
                            follow_symlinks=config.SYS_FOLLOW_SYMLINKS)
                    else:
                        self.playlist += util.match_files(i[0], self.suffixlist)
                    # set autoplay to True on such big lists
                    self.autoplay = True
                else:
                    # filename
                    self.playlist.append(i)

        self.__build__ = True

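def _playlist_usage_sketch():
    # Illustrative sketch only, not part of the original module: the three forms
    # the 'playlist' argument can take, as described in the __init__ docstring
    # above.  All paths are made up.
    Playlist(playlist='/music/lists/favourites.m3u', display_type='audio', build=True)
    Playlist(playlist=['/music/a.mp3', '/music/b.mp3'], display_type='audio', build=True)
    Playlist(playlist=[('/music/albums', 1)], display_type='audio', build=True)
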
def cache_cropdetect():
    """
    cache all video files for crop detection
    """
    import encodingcore
    import kaa.metadata

    # load the fxd part of video
    import fxdhandler
    plugin.register_callback('fxditem', ['video'], 'movie', fxdhandler.parse_movie)
    plugin.register_callback('fxditem', ['video'], 'disc-set', fxdhandler.parse_disc_set)

    print 'checking cropdetect...................................',
    print
    sys.__stdout__.flush()

    suffixes = set(config.VIDEO_MPLAYER_SUFFIX).union(config.VIDEO_XINE_SUFFIX)
    files = []
    fxd = []
    for d in config.VIDEO_ITEMS:
        if d.__class__ != tuple:
            continue
        if not os.path.isdir(d[1]):
            continue
        try:
            files += util.match_files_recursively(d[1], suffixes)
            fxd += util.match_files_recursively(d[1], fxditem.mimetype.suffix())
        except:
            pass

    def lowercaseSort(lhs, rhs):
        lhs, rhs = lhs.lower(), rhs.lower()
        return cmp(lhs, rhs)

    fxd.sort(lowercaseSort)
    files = util.misc.unique(files)
    files.sort(lowercaseSort)

    for filename in copy.copy(files):
        try:
            info = kaa.metadata.parse(filename)
            if not info:
                print 'ERROR: "%s" has no metadata' % (filename)
                continue
            if info.length < 5 or info.length > 5 * 60 * 60:
                print 'ERROR: "%s" has invalid length of %s' % (filename, info.length)
                continue
            encjob = encodingcore.EncodingJob(None, None, None, None)
            encjob.source = filename
            encjob.info = info
            encjob.length = info.length
            encjob._CropDetect()
            encjob.thread.join(10.0)
            if encjob.thread.isAlive():
                encjob.thread.kill_pipe()
                encjob.thread.join(10.0)
                if encjob.thread.isAlive():
                    print 'CRITICAL: "%s" thread is still alive (pid %s)' % (filename, encjob.pipe.pid)
                    print
                    print dir(encjob.thread)
                    print
                    print encjob.thread.__dict__
                    print
                    continue
                else:
                    print 'ERROR: "%s" cropdetect thread was killed' % (filename)
                    continue
            print filename, info.mime, info.length, encjob.crop, encjob.cropres
        except Exception, e:
            print 'ERROR: "%s" failed: %s' % (filename, e)

def create_metadata():
    """
    scan files and create metadata
    """
    import util.extendedmeta

    print checking('creating audio metadata'),
    sys.__stdout__.flush()
    for dir in config.AUDIO_ITEMS:
        if os.path.isdir(dir[1]):
            util.extendedmeta.AudioParser(dir[1], rescan=True)
    print 'done'

    print checking('creating playlist metadata'),
    sys.__stdout__.flush()
    pl = []
    fxd = []
    for dir in config.AUDIO_ITEMS:
        if os.path.isdir(dir[1]):
            pl += util.match_files_recursively(dir[1], playlist.mimetype.suffix())
            fxd += util.match_files_recursively(dir[1], fxditem.mimetype.suffix())
        elif isinstance(dir, list) or isinstance(dir, tuple):
            print
            print 'bad path: %s ' % dir[1],
            sys.__stdout__.flush()
        elif util.match_suffix(dir, playlist.mimetype.suffix()):
            pl.append(dir)
        elif util.match_suffix(dir, fxditem.mimetype.suffix()):
            fxd.append(dir)
        elif util.match_suffix(dir[1], playlist.mimetype.suffix()):
            pl.append(dir[1])
        elif util.match_suffix(dir[1], fxditem.mimetype.suffix()):
            fxd.append(dir[1])

    try:
        items = playlist.mimetype.get(None, util.misc.unique(pl))
        # ignore fxd files for now, they can't store meta-info
        # for f in fxditem.mimetype.get(None, util.misc.unique(fxd)):
        #     if f.type == 'playlist':
        #         items.append(f)
        for i in items:
            util.extendedmeta.PlaylistParser(i)
    except:
        pass
    print 'done'

    print checking('checking database'),
    sys.__stdout__.flush()
    try:
        # The DB stuff
        import sqlite
        for dir in config.AUDIO_ITEMS:
            if os.path.isdir(dir[1]):
                util.extendedmeta.addPathDB(dir[1], dir[0], verbose=False)
        print 'done'
    except ImportError:
        print 'skipping'
        pass

    print checking('creating directory metadata'),
    sys.__stdout__.flush()
    subdirs = {'all': []}

    # get all subdirs for each type
    for type in activate_plugins:
        subdirs[type] = []
        for d in getattr(config, '%s_ITEMS' % type.upper()):
            try:
                d = d[1]
                if d == os.path.sep:
                    print 'ERROR: %s_ITEMS contains root directory, skipped.' % type
                    continue
            except:
                pass
            if not os.path.isdir(d):
                continue
            rec = util.get_subdirs_recursively(d)
            subdirs['all'] += rec
            subdirs[type] += rec

    subdirs['all'] = util.misc.unique(subdirs['all'])
    subdirs['all'].sort(lambda l, o: cmp(l.upper(), o.upper()))

    # walk through each directory
    for s in subdirs['all']:
        if s.find(os.path.join(os.path.sep, '.')) > 0:
            continue
        # create the DirItems
        d = directory.DirItem(s, None)
        # rebuild meta-info
        d.create_metainfo()
        for type in activate_plugins:
            if subdirs.has_key(type) and s in subdirs[type]:
                d.display_type = type
                # scan again with display_type
                d.create_metainfo()
    print 'done'

class PluginInterface(plugin.MimetypePlugin):
    """
    Plugin to handle all kinds of image items
    """
    def __init__(self):
        plugin.MimetypePlugin.__init__(self)
        self.display_type = ['image']

        # register the callbacks
        plugin.register_callback('fxditem', ['image'], 'slideshow', self.fxdhandler)

        # activate the mediamenu for image
        plugin.activate('mediamenu', level=plugin.is_active('image')[2], args='image')


    def suffix(self):
        """
        return the list of suffixes this class handles
        """
        return config.IMAGE_SUFFIX


    def get(self, parent, files):
        """
        return a list of items based on the files
        """
        items = []
        if config.IMAGE_EXCLUDE:
            exclude_string = re.compile('|'.join(config.IMAGE_EXCLUDE))
        for file in util.find_matches(files, config.IMAGE_SUFFIX):
            if config.IMAGE_EXCLUDE:
                if not re.search(exclude_string, file):
                    items.append(ImageItem(file, parent))
                    files.remove(file)
            else:
                items.append(ImageItem(file, parent))
                files.remove(file)
        return items


    def dirinfo(self, diritem):
        """
        set information for a diritem based on album.xml
        """
        # dirinfo = bins.DirInfo(diritem.dir)
        # if dirinfo.has_key('desc'):
        #     info = dirinfo['desc']
        # if dirinfo.has_key('sampleimage') and dirinfo['sampleimage']:
        #     image = vfs.join(diritem.dir, dirinfo['sampleimage'])
        #     if vfs.isfile(image):
        #         diritem.image = image
        # if dirinfo.has_key('title') and dirinfo['title']:
        #     diritem.name = dirinfo['title']


    def fxdhandler(self, fxd, node):
        """
        parse image specific stuff from fxd files::

            <?xml version="1.0" ?>
            <freevo>
              <slideshow title="foo" random="1|0" repeat="1|0">
                <cover-img>foo.jpg</cover-img>
                <background-music random="1|0">
                  <directory recursive="1|0">path</directory>
                  <file>filename</file>
                </background-music>
                <files>
                  <directory recursive="1|0" duration="10">path</directory>
                  <file duration="0">filename</file>
                </files>
                <info>
                  <description>A nice description</description>
                </info>
              </slideshow>
            </freevo>
        """
        items = []
        dirname = os.path.dirname(fxd.getattr(None, 'filename', ''))
        children = fxd.get_children(node, 'files')
        if children:
            children = children[0].children

        for child in children:
            try:
                citems = []
                fname = os.path.join(dirname, String(fxd.gettext(child)))
                if child.name == 'directory':
                    if fxd.getattr(child, 'recursive', 0):
                        f = util.match_files_recursively(fname, self.suffix(), skip_password=True)
                    else:
                        f = util.match_files(fname, self.suffix())
                    citems = self.get(None, f)
                elif child.name == 'file':
                    citems = self.get(None, [fname])

                duration = fxd.getattr(child, 'duration', 0)
                if duration:
                    for i in citems:
                        i.duration = duration
                items += citems
            except OSError, e:
                print 'slideshow error:', e

        pl = Playlist('', items, fxd.getattr(None, 'parent', None),
                      random=fxd.getattr(node, 'random', 0),
                      repeat=fxd.getattr(node, 'repeat', 0))
        pl.autoplay = True
        pl.name = fxd.getattr(node, 'title')
        pl.image = fxd.childcontent(node, 'cover-img')
        if pl.image:
            pl.image = vfs.join(vfs.dirname(fxd.filename), pl.image)

        # background music
        children = fxd.get_children(node, 'background-music')
        if children:
            random = fxd.getattr(children[0], 'random', 0)
            children = children[0].children

        files = []
        suffix = []
        for p in plugin.mimetype('audio'):
            suffix += p.suffix()

        for child in children:
            try:
                fname = os.path.join(dirname, fxd.gettext(child))
                if child.name == 'directory':
                    if fxd.getattr(child, 'recursive', 0):
                        files += util.match_files_recursively(fname, suffix, skip_password=True)
                    else:
                        files += util.match_files(fname, suffix)
                elif child.name == 'file':
                    files.append(fname)
            except OSError, e:
                print 'playlist error:', e
