def insert(self, metadata, path):
    """
    Insert a song into the database, creating the artist and album rows
    on demand.

    :param metadata: dictionary-like object (kaa.metadata style, supports
        ``get(key, default=...)``) carrying 'artist', 'album' and 'title'
    :param path: path of the song file; only the basename is stored
    """
    # Hoist the repeated metadata lookups; missing tags map to 'unknown'.
    artist = metadata.get('artist', default='unknown')
    album = metadata.get('album', default='unknown')
    title = metadata.get('title', default='unknown')
    try:
        self._execute('INSERT INTO artists (name) VALUES (?)', [artist])
        self.db.commit()
    except sqlite3.IntegrityError:
        # artist row already exists; we only need its id below
        pass
    artist_id = self._execute('SELECT id FROM artists WHERE name=?', [artist])[0][0]
    try:
        self._execute('INSERT INTO albums (album, artist_id) VALUES (?, ?)', [album, artist_id])
        self.db.commit()
    except sqlite3.IntegrityError:
        # album row already exists
        pass
    # BUGFIX: look the album up per artist so that two artists with an
    # identically named album do not resolve to the same row. Fall back
    # to the old album-only lookup for schemas where album names are
    # globally unique.
    rows = self._execute('SELECT id FROM albums WHERE album=? AND artist_id=?', [album, artist_id])
    if not rows:
        rows = self._execute('SELECT id FROM albums WHERE album=?', [album])
    album_id = rows[0][0]
    # Only the basename is persisted; the directory part is dropped.
    a_path = os.path.basename(path)
    self._execute('INSERT INTO Songs (song, album_id, path, Album, Artist) VALUES (?, ?, ?, ?, ?)',
                  [title, album_id, a_path, album, artist])
    self.db.commit()
def parse(self, filename, metadata):
    """
    Return the Movie object for the filename if it is in the database.

    Lookup order: first by file hash + size, then by an IMDB id found in
    a .nfo file next to the movie. Returns None if nothing matches.
    """
    if not os.path.exists(filename):
        return
    data = []
    # search based on the movie hash
    if metadata.get('hash'):
        # renamed from 'hash' to avoid shadowing the builtin
        hashvalue = u'%s|%s' % (metadata.get('hash'), os.path.getsize(filename))
        data = self._db.query(type='hash', value=hashvalue)
        if data:
            data = self._db.query(type='movie', moviedb=data[0]['moviedb'])
    # search based on imdb id in nfo file
    if not data:
        nfo = os.path.splitext(filename)[0] + '.nfo'
        if os.path.exists(nfo):
            # BUGFIX: close the nfo file instead of leaking the handle
            with open(nfo) as nfofile:
                match = IMDB_REGEXP.search(nfofile.read())
            if match:
                data = self._db.query(type='movie', imdb=u'tt' + match.groups()[0])
    # not found
    if not data:
        return None
    # return result
    return Movie(data[0]['data'], self)
def add_movie_by_id(self, filename, metadata, id):
    """
    Match movie id to the given filename for future lookups.

    Coroutine: yields False when hash or filesize metadata is missing,
    otherwise stores a hash->movie mapping and yields True.
    """
    if not metadata.get('hash') or not metadata.get('filesize'):
        yield False
    # check if we already have that movie in the db
    data = self._db.query(type='movie', moviedb=id)
    if not data:
        # We are limited to 30 requests per 10 seconds. Wait one
        # second before doing the searches.
        yield kaa.delay(1)
        result = yield self._server_call('movie/%s' % id)
        if result:
            result['/images'] = yield self._server_call('movie/%s/images' % id)
            result['/casts'] = yield self._server_call('movie/%s/casts' % id)
            result['/keywords'] = yield self._server_call('movie/%s/keywords' % id)
            movie = Movie(result, self)
            if movie.poster and not os.path.isfile(movie.poster):
                data = yield (yield movie.get_all_posters())[0].fetch()
                # BUGFIX: close the file instead of leaking the handle
                with open(movie.poster, 'w') as f:
                    f.write(data)
            if movie.image and not os.path.isfile(movie.image):
                data = yield (yield movie.get_all_images())[0].fetch()
                with open(movie.image, 'w') as f:
                    f.write(data)
            self._db.add('movie', moviedb=int(id), name=movie.name, imdb=movie.imdb, data=movie._data)
            self._db.commit()
            data = self._db.query(type='movie', moviedb=id)
    if data:
        # BUGFIX: use the same dictionary-like accessor as the guard above
        # instead of attribute access (metadata.filesize), so plain dicts work
        self._db.add('hash', moviedb=id, value=u'%s|%s' % (metadata.get('hash'), metadata.get('filesize')))
        self._db.commit()
    yield True
def search(filename, metadata=None):
    """
    Search the web for information matching the given filename.

    If metadata is None it will be created using kaa.metadata; any
    dictionary-like object is accepted. TV series are routed to the
    thetvdb backend, everything else to themoviedb.
    """
    metadata = metadata or kaa.metadata.parse(filename)
    series = metadata.get('series', None)
    if series:
        return backends['thetvdb'].search(series)
    return backends['themoviedb'].search(filename, metadata)
def search(filename, metadata=None, backend='thetvdb'):
    """
    Search the given filename in the web. If metadata is None it will be
    created using kaa.metadata. Each dictionary-like object is allowed.

    :param backend: name of the backend to query; None is returned for
        unknown backends or non-series files
    """
    # idiom fix: 'backend not in backends' instead of 'not backend in ...'
    if backend not in backends:
        return None
    if not metadata:
        metadata = kaa.metadata.parse(filename)
    if metadata.get('series', None):
        return backends[backend].search(metadata.get('series'), filename, metadata)
    return None
def parse(filename, metadata=None):
    """
    Parse the given filename and return information from the db. If
    metadata is None it will be created using kaa.metadata. Each
    dictionary-like object is allowed.
    """
    if not metadata:
        metadata = kaa.metadata.parse(filename)
    # non-series files go straight to the movie backend
    if not metadata.get('series', None):
        return backends['themoviedb'].parse(filename, metadata)
    info = backends['thetvdb'].parse(filename, metadata)
    if not info:
        # series lookup failed; fall back to the movie backend
        return backends['themoviedb'].parse(filename, metadata)
    season = metadata.get('season', None)
    episode = metadata.get('episode')
    if season and episode:
        # narrow the series result down to the specific episode
        info = info.get_season(season).get_episode(episode)
    return info
def step(self):
    """
    Process one job.

    Returns False when there is nothing to do (queue empty or shutdown),
    True after handling one job (download, skip, or thumbnail creation).
    """
    if not self.jobs or kaa.main.is_shutting_down():
        return False
    job = self.jobs.pop(0)
    if job.url and not os.path.isfile(job.filename):
        # we need to download first
        self.download(job)
        self.schedule_next(fast=True)
        return True
    for size in ('large', 'normal'):
        # iterate over the sizes
        imagefile = job.imagefile % size
        if not os.path.isfile(imagefile):
            break
        metadata = kaa.metadata.parse(imagefile)
        if not metadata:
            break
        mtime = metadata.get('Thumb::MTime')
        try:
            if not mtime or mtime != str(os.stat(job.filename)[stat.ST_MTIME]):
                # needs an update
                break
        except (IOError, OSError):
            log.exception('os.stat')
            break
    else:
        # we did not break out of the loop, this means we have both thumbnails
        # and the mtime is also correct. Refuse the recreate thumbnail
        self.notify_client(job)
        self.schedule_next(fast=True)
        return True
    log.info('create thumbnail for %s -> %s', job.filename, job.imagefile)
    if job.filename.lower().endswith('jpg'):
        # try epeg for fast thumbnailing
        try:
            if os.stat(job.filename)[stat.ST_SIZE] < 1024*1024:
                raise ValueError('no photo, use imlib2')
            libthumb.epeg(job.filename, job.imagefile % 'large', (256, 256))
            libthumb.epeg(job.filename, job.imagefile % 'normal', (128, 128))
            self.notify_client(job)
            self.schedule_next()
            return True
        except (IOError, OSError, ValueError):
            pass
    try:
        # try normal imlib2 thumbnailing
        libthumb.png(job.filename, job.imagefile % 'large', (256, 256))
        libthumb.png(job.filename, job.imagefile % 'normal', (128, 128))
        self.notify_client(job)
        self.schedule_next()
        return True
    # BUGFIX: replaced the Python-2-only 'except (...), e' syntax; the
    # bound exception was never used anyway
    except (IOError, OSError, ValueError):
        pass
def add_movie_by_id(self, filename, metadata, id):
    """
    Match movie id to the given filename for future lookups.

    Coroutine: yields False when hash or filesize metadata is missing,
    otherwise stores a hash->movie mapping and yields True.
    """
    if not metadata.get('hash') or not metadata.get('filesize'):
        yield False
    # check if we already have that movie in the db
    data = self._db.query(type='movie', moviedb=id)
    if not data:
        # We are limited to 30 requests per 10 seconds. Wait one
        # second before doing the searches.
        yield kaa.delay(1)
        result = yield self._server_call('movie/%s' % id)
        if result:
            result['/images'] = yield self._server_call('movie/%s/images' % id)
            result['/casts'] = yield self._server_call('movie/%s/casts' % id)
            result['/keywords'] = yield self._server_call(
                'movie/%s/keywords' % id)
            movie = Movie(result, self)
            if movie.poster and not os.path.isfile(movie.poster):
                data = yield (yield movie.get_all_posters())[0].fetch()
                # BUGFIX: close the file instead of leaking the handle
                with open(movie.poster, 'w') as f:
                    f.write(data)
            if movie.image and not os.path.isfile(movie.image):
                data = yield (yield movie.get_all_images())[0].fetch()
                with open(movie.image, 'w') as f:
                    f.write(data)
            self._db.add('movie', moviedb=int(id), name=movie.name,
                         imdb=movie.imdb, data=movie._data)
            self._db.commit()
            data = self._db.query(type='movie', moviedb=id)
    if data:
        # BUGFIX: use the same dictionary-like accessor as the guard above
        # instead of attribute access (metadata.filesize), so plain dicts work
        self._db.add('hash', moviedb=id,
                     value=u'%s|%s' % (metadata.get('hash'), metadata.get('filesize')))
        self._db.commit()
    yield True
def _device_add_to_database(self, metadata, devdict):
    """
    Add the device to the database.

    Coroutine. Classifies the device as a video disc (VCD/DVD), an audio
    CD, or a generic filesystem, and creates the matching 'media' object
    plus its child objects in the database. Yields True when done.
    """
    # take the database read lock before touching the db
    yield kaa.inprogress(self._db.read_lock)
    # NOTE(review): 'id' is assigned but never used below; the code keeps
    # calling devdict.get('beacon.id') instead
    id = devdict.get('beacon.id')
    if devdict.get('volume.is_disc') == True and metadata and \
       metadata.get('mime') in ('video/vcd', 'video/dvd'):
        # pass rom drive
        # mime is 'video/vcd' or 'video/dvd'; [6:] strips 'video/'
        type = metadata['mime'][6:]
        log.info('detect %s as %s' % (devdict.get('beacon.id'), type))
        mid = self._db.add_object("media", name=devdict.get('beacon.id'), content=type)['id']
        vid = self._db.add_object("video", name="", parent=('media', mid), title=unicode(get_title(metadata['label'])), media = mid)['id']
        # one track object per title on the disc, including per-track
        # audio and subtitle stream information
        for track in metadata.tracks:
            self._db.add_object('track_%s' % type, name='%02d' % track.trackno, parent=('video', vid), media=mid, chapters=track.chapters, length=track.length, audio=[ x.convert() for x in track.audio ], subtitles=[ x.convert() for x in track.subtitles ])
        # NOTE(review): presumably yielding a plain value ends the kaa
        # coroutine here (acts as return) -- confirm against kaa.base
        yield True
    if devdict.get('volume.disc.has_audio') and metadata:
        # Audio CD
        log.info('detect %s as audio cd' % devdict.get('beacon.id'))
        mid = self._db.add_object("media", name=devdict.get('beacon.id'), content='cdda')['id']
        aid = self._db.add_object("audio", name='', title = metadata.get('title'), artist = metadata.get('artist'), parent=('media', mid), media = mid)['id']
        for track in metadata.tracks:
            self._db.add_object('track_cdda', name=str(track.trackno), title=track.get('title'), artist=track.get('artist'), parent=('audio', aid), media=mid)
        yield True
    # filesystem: neither a video disc nor an audio cd
    log.info('detect %s as filesystem' % devdict.get('beacon.id'))
    mid = self._db.add_object("media", name=devdict.get('beacon.id'), content='file')['id']
    mtime = 0
    if devdict.get('block.device'):
        # use the block device node's mtime for the root directory entry
        mtime = os.stat(devdict.get('block.device'))[stat.ST_MTIME]
    self._db.add_object("dir", name="", parent=('media', mid), media=mid, mtime=mtime)
    yield True
def parse(filename, metadata=None):
    """
    Parse the given filename and return information from the db. If
    metadata is None it will be created using kaa.metadata. Each
    dictionary-like object is allowed.
    """
    metadata = metadata or kaa.metadata.parse(filename)
    if not metadata.get('series', None):
        return None
    # ask each backend in turn until one returns an Episode
    for backend in backends.values():
        entry = backend.get_entry_from_metadata(metadata)
        if entry and isinstance(entry, core.Episode):
            return entry
    return None
def parse(filename, metadata=None):
    """
    Parse the given filename and return information from the db. If
    metadata is None it will be created using kaa.metadata. Each
    dictionary-like object is allowed.
    """
    if not initialized:
        raise RuntimeError('kaa.webmetadata not initialized')
    if not os.path.isfile(filename):
        return None
    metadata = metadata or kaa.metadata.parse(filename)
    # series files go to the tv module, everything else to the movie module
    handler = tv if metadata.get('series', None) else movie
    return handler.parse(filename, metadata)
def _add_device_to_db(self, metadata, dev):
    """
    Add the device to the database.

    Coroutine. Classifies the device as a video disc (VCD/DVD), an audio
    CD, or a generic filesystem, and creates the matching 'media' object
    plus its child objects in the database.
    """
    # take the database read lock before touching the db
    yield kaa.inprogress(self._db.read_lock)
    # FIXME: check if the device is still valid
    # FIXME: handle failed dvd detection
    id = dev.get('beacon.id')
    if dev.get('volume.is_disc') == True and metadata and \
       metadata.get('mime') in ('video/vcd', 'video/dvd'):
        # pass rom drive
        # mime is 'video/vcd' or 'video/dvd'; [6:] strips 'video/'
        type = metadata['mime'][6:]
        log.info('detect %s as %s' % (id, type))
        mid = self._db.add_object("media", name=id, content=type)['id']
        vid = self._db.add_object("video", name="", parent=('media', mid), title=unicode(get_title(metadata['label'])), media = mid)['id']
        # one track object per title on the disc, including per-track
        # audio and subtitle stream information
        for track in metadata.tracks:
            self._db.add_object('track_%s' % type, name='%02d' % track.trackno, parent=('video', vid), media=mid, chapters=track.chapters, length=track.length, audio=[ x.convert() for x in track.audio ], subtitles=[ x.convert() for x in track.subtitles ])
        return
    if dev.get('volume.disc.has_audio') and metadata:
        # Audio CD
        log.info('detect %s as audio cd' % id)
        mid = self._db.add_object("media", name=id, content='cdda')['id']
        aid = self._db.add_object("audio", name='', title = metadata.get('title'), artist = metadata.get('artist'), parent=('media', mid), media = mid)['id']
        for track in metadata.tracks:
            self._db.add_object('track_cdda', name=str(track.trackno), title=track.get('title'), artist=track.get('artist'), parent=('audio', aid), media=mid)
        return
    # filesystem: neither a video disc nor an audio cd
    log.info('detect %s as %s' % (id, dev.get('volume.fstype', '<unknown filesystem>')))
    mid = self._db.add_object("media", name=id, content='file')['id']
    mtime = 0
    # FIXME: wrong for /
    if dev.get('block.device'):
        # use the block device node's mtime for the root directory entry
        mtime = os.stat(dev.get('block.device'))[stat.ST_MTIME]
    self._db.add_object("dir", name="", parent=('media', mid), media=mid, mtime=mtime)
def search(filename, metadata=None):
    """
    Search the given filename in the web. If metadata is None it will be
    created using kaa.metadata. Each dictionary-like object is allowed.

    Coroutine; yields the search result, or {} when the file does not
    exist or nothing matches.
    """
    if not initialized:
        raise RuntimeError('kaa.webmetadata not initialized')
    if not os.path.isfile(filename):
        # NOTE(review): presumably yielding a plain value finishes the kaa
        # coroutine here (acts as return) -- confirm against kaa.base
        yield {}
    if not metadata:
        metadata = kaa.metadata.parse(filename)
    if metadata.get('series', None):
        # TV series: delegate to the (asynchronous) tv backend
        yield (yield tv.search(filename, metadata))
    if metadata['length'] and metadata['length'] > 60 * 60:
        # at least one hour of runtime: treat it as a movie candidate
        yield (yield movie.search(filename, metadata))
    # nothing matched
    yield {}
def _get_thumbnail(self, type='any', check_mtime=False):
    """
    Get the filename to the thumbnail. DO NOT USE OUTSIDE OF BEACON

    :param type: 'normal', 'large' or 'any'
    :param check_mtime: Check the file modification time against the
        information stored in the thumbnail. If the file has changed,
        the thumbnail will not be returned.
    :returns: full path to thumbnail file or None
    """
    # We can thumbnail a file of course, but also a directory, such as in
    # the case of a DVD tree (that contains VIDEO_TS), which kaa.metadata
    # will recognize as a video "file."
    try:
        statinfo = os.stat(self.name)
    except OSError:
        # item vanished from disk
        return None
    mode = statinfo.st_mode
    if not (stat.S_ISREG(mode) or stat.S_ISDIR(mode)):
        return None
    if check_mtime:
        # find the candidate first, then validate its stored mtime
        candidate = self._get_thumbnail(type)
        if not candidate:
            return None
        info = kaa.metadata.parse(candidate)
        if info and info.get('Thumb::MTime') == str(statinfo[stat.ST_MTIME]):
            return candidate
        # mtime check failed, return no image
        return None
    if type == 'any':
        # prefer the large thumbnail, fall back to the normal one
        large = self._get_thumbnail(LARGE, check_mtime)
        if large:
            return large
        type = NORMAL
    path = self._thumbnail % type
    if os.path.isfile(path):
        return path
    if type != 'fail/beacon':
        # last resort: look for a recorded 'failed' thumbnail
        return self._get_thumbnail('fail/beacon', check_mtime)
    return None
def _thumbnailer(self):
    """
    Coroutine that drains the video thumbnail job queue, regenerating
    thumbnails whose stored mtime no longer matches the source file.
    """
    while self.jobs and not kaa.main.is_stopped():
        job = self.jobs.pop(0)
        log.info('Now processing video thumbnail job: file=%s, qlen=%d', job.filename, len(self.jobs))
        for size in ('large', 'normal'):
            imagefile = job.imagefile % size
            if not os.path.isfile(imagefile):
                # One (or both) of the large and normal thumbnails don't exist, so
                # we must generate.
                break
            # NOTE(review): kaa.metadata.parse can return None for an
            # unparsable file, which would make the .get below raise -- confirm
            metadata = kaa.metadata.parse(imagefile)
            mtime = metadata.get('Thumb::MTime')
            if mtime != str(os.stat(job.filename)[stat.ST_MTIME]):
                # File mtime doesn't match the stored mtime in the thumbnail metadata,
                # so must regenerate.
                break
        else:
            # No thumb generation needed.
            continue
        # XXX: this isn't very effective because we can't throttle mplayer
        # once it's running. We run mplayer at the lowest possible priority
        # (if schedtool is available), so that'll have to suffice.
        # IDEA: actually we can throttle mplayer, if we remove -benchmark and pass -fps.
        delay = scheduler.next(self.config.scheduler.policy) * self.config.scheduler.multiplier
        if delay:
            # too much CPU load, slow down
            yield kaa.delay(delay)
        try:
            success = yield self._generate(job)
        except Exception:
            success = False
        if not success:
            # Something went awry, generate a failed thumbnail file.
            self.create_failed(job)
        # Notify client via rpc that this thumbnail job is done.
        self.notify_client(job)
def _get_thumbnail(self, type='any', check_mtime=False): """ Get the filename to the thumbnail. DO NOT USE OUTSIDE OF BEACON :param type: 'normal', 'large' or 'any' :param check_mtime: Check the file modification time against the information stored in the thumbnail. If the file has changed, the thumbnail will not be returned. :returns: full path to thumbnail file or None """ # We can thumbnail a file of course, but also a directory, such as in # the case of a DVD tree (that contains VIDEO_TS), which kaa.metadata # will recognize as a video "file." try: statinfo = os.stat(self.name) except OSError: return None if not stat.S_ISREG(statinfo.st_mode) and not stat.S_ISDIR( statinfo.st_mode): return None if check_mtime: image = self._get_thumbnail(type) if image: metadata = kaa.metadata.parse(image) if metadata: mtime = metadata.get('Thumb::MTime') if mtime == str(statinfo[stat.ST_MTIME]): return image # mtime check failed, return no image return None if type == 'any': image = self._get_thumbnail(LARGE, check_mtime) if image: return image type = NORMAL if os.path.isfile(self._thumbnail % type): return self._thumbnail % type if not type == 'fail/beacon': return self._get_thumbnail('fail/beacon', check_mtime) return None
def _device_add_to_database(self, metadata, devdict):
    """
    Add the device to the database.

    Coroutine. Classifies the device as a video disc (VCD/DVD), an audio
    CD, or a generic filesystem, and creates the matching 'media' object
    plus its child objects in the database. Yields True when done.
    """
    # take the database read lock before touching the db
    yield kaa.inprogress(self._db.read_lock)
    # NOTE(review): 'id' is assigned but never used below; the code keeps
    # calling devdict.get('beacon.id') instead
    id = devdict.get('beacon.id')
    if devdict.get('volume.is_disc') == True and metadata and \
       metadata.get('mime') in ('video/vcd', 'video/dvd'):
        # pass rom drive
        # mime is 'video/vcd' or 'video/dvd'; [6:] strips 'video/'
        type = metadata['mime'][6:]
        log.info('detect %s as %s' % (devdict.get('beacon.id'), type))
        mid = self._db.add_object("media", name=devdict.get('beacon.id'),
                                  content=type)['id']
        vid = self._db.add_object("video", name="", parent=('media', mid),
                                  title=unicode(get_title(metadata['label'])),
                                  media=mid)['id']
        # one track object per title on the disc, including per-track
        # audio and subtitle stream information
        for track in metadata.tracks:
            self._db.add_object(
                'track_%s' % type, name='%02d' % track.trackno,
                parent=('video', vid), media=mid,
                chapters=track.chapters, length=track.length,
                audio=[x.convert() for x in track.audio],
                subtitles=[x.convert() for x in track.subtitles])
        # NOTE(review): presumably yielding a plain value ends the kaa
        # coroutine here (acts as return) -- confirm against kaa.base
        yield True
    if devdict.get('volume.disc.has_audio') and metadata:
        # Audio CD
        log.info('detect %s as audio cd' % devdict.get('beacon.id'))
        mid = self._db.add_object("media", name=devdict.get('beacon.id'),
                                  content='cdda')['id']
        aid = self._db.add_object("audio", name='',
                                  title=metadata.get('title'),
                                  artist=metadata.get('artist'),
                                  parent=('media', mid), media=mid)['id']
        for track in metadata.tracks:
            self._db.add_object('track_cdda', name=str(track.trackno),
                                title=track.get('title'),
                                artist=track.get('artist'),
                                parent=('audio', aid), media=mid)
        yield True
    # filesystem: neither a video disc nor an audio cd
    log.info('detect %s as filesystem' % devdict.get('beacon.id'))
    mid = self._db.add_object("media", name=devdict.get('beacon.id'),
                              content='file')['id']
    mtime = 0
    if devdict.get('block.device'):
        # use the block device node's mtime for the root directory entry
        mtime = os.stat(devdict.get('block.device'))[stat.ST_MTIME]
    self._db.add_object("dir", name="", parent=('media', mid), media=mid, mtime=mtime)
    yield True
def _parse(db, item, mtime):
    """
    Parse the item, this can take a while.

    Coroutine. Runs kaa.metadata over the item's file, derives the db
    object type (dir/file/image/video/disc...), gathers image/thumbnail
    attributes, updates or creates the db entry, and finally handles
    per-track children for items that have tracks (e.g. a dvd image).
    Yields 'produced_load' values to report how expensive the step was.
    """
    produced_load = 0
    try:
        #
        # Parent checking
        #
        parent = item._beacon_parent
        if not parent._beacon_id:
            # There is a parent without id, update the parent now.
            r = parse(db, parent)
            if isinstance(r, kaa.InProgress):
                yield r
            if not parent._beacon_id:
                # This should never happen
                raise AttributeError('parent for %s has no dbid' % item)
            # we had no parent id which we have now. Restart the whole
            # parsing process. maye this item was in the db already
            r = parse(db, parent)
            if isinstance(r, kaa.InProgress):
                r = yield r
            yield r
        #
        # Metadata parsing
        #
        t1 = time.time()
        # FIXME: add force parameter from config file:
        # - always force (slow but best result)
        # - never force (faster but maybe wrong)
        # - only force on media 1 (good default)
        # Parse metadata in an extra named thread
        metadata = yield parse_thread(item.filename)
        if not metadata:
            metadata = {}
        attributes = { 'mtime': mtime, 'image': metadata.get('image') }
        # derive the db object type from the parsed metadata
        if metadata.get('media') == kaa.metadata.MEDIA_DISC and \
           metadata.get('subtype') in db.list_object_types():
            type = metadata['subtype']
            if metadata.get('type'):
                attributes['scheme'] = metadata.get('type').lower()
            item._beacon_isdir = False
        elif media_types.get(metadata.get('media')) in db.list_object_types():
            type = media_types.get(metadata['media'])
        elif item._beacon_isdir:
            type = 'dir'
        else:
            type = 'file'
        if item._beacon_id and type != item._beacon_id[0]:
            # The item changed its type. Adjust the db
            yield kaa.inprogress(db.read_lock)
            data = db.update_object_type(item._beacon_id, type)
            if not data:
                log.error('item to change not in the db anymore')
            log.info('change item %s to %s' % (item._beacon_id, type))
            item._beacon_database_update(data)
        #
        # Thumbnail / Cover / Image stuff.
        #
        produced_load = 1
        if type == 'dir':
            attributes['image_from_items'] = False
            if not attributes.get('image'):
                # look for a cover file inside the directory
                for cover in ('cover.jpg', 'cover.png'):
                    if os.path.isfile(item.filename + cover):
                        attributes['image'] = item.filename + cover
                        break
            # TODO: do some more stuff here:
            # Audio directories may have a different cover if there is only
            # one jpg in a dir of mp3 files or a files with 'front' in the name.
            # They need to be added here as special kind of cover
        elif type == 'image':
            attributes['image'] = item.filename
            if metadata.get('thumbnail'):
                t = thumbnail.Thumbnail(item.filename, item._beacon_media)
                if t.needs_update:
                    # only store the normal version
                    try:
                        produced_load = 2
                        t.normal = kaa.imlib2.open_from_memory(metadata.get('thumbnail'))
                    except (ValueError, IOError):
                        log.exception('image thumbnail')
        else:
            base = os.path.splitext(item.filename)[0]
            if type == 'video' and not attributes.get('image') and thumbnail.SUPPORT_VIDEO:
                attributes['image'] = item.filename
            if metadata.get('thumbnail') and not attributes.get('image'):
                # use the embedded raw thumbnail from the metadata
                attributes['image'] = item.filename
                t = thumbnail.Thumbnail(item.filename, item._beacon_media)
                try:
                    produced_load = 2
                    t.image = kaa.imlib2.open_from_memory(metadata['thumbnail'])
                except (ValueError, IOError):
                    log.exception('raw thumbnail')
            # sidecar images: foo.jpg/foo.png next to foo.avi, or
            # foo.avi.jpg/foo.avi.png
            for ext in ('.jpg', '.png'):
                if os.path.isfile(base + ext):
                    attributes['image'] = base + ext
                    break
                if os.path.isfile(item.filename + ext):
                    attributes['image'] = item.filename + ext
                    break
        #
        # Type specific attributes
        #
        if type == 'video':
            # Normally db.add_object() will take care of assigning type
            # attributes from metadata, but some attributes for videos
            # aren't at the top-level attribute object. For video
            # dimensions, take the dimensions of the first video track
            # (of the longest title, if applicable).
            video = None
            if metadata.get('video'):
                video = metadata.video[0]
            elif metadata.get('tracks'):
                # Find the longest title with a video track.
                for title in sorted(metadata.tracks, key=lambda t: -t.length):
                    if title.get('video'):
                        video = title.video[0]
                        break
            if video:
                attributes['width'] = video.get('width')
                attributes['height'] = video.get('height')
            attributes['series'] = metadata.series
            attributes['season'] = metadata.season
            attributes['episode'] = metadata.episode
            attributes['metadata'] = metadata
        # now call extention plugins
        ext = os.path.splitext(item.filename)[1]
        for function in extention_plugins.get(ext, []) + extention_plugins.get(None, []):
            function(item, attributes, type)
        yield kaa.inprogress(db.read_lock)
        if attributes.get('image'):
            # create thumbnail
            t = thumbnail.Thumbnail(attributes.get('image'), item._beacon_media)
            if t.needs_update and (not type == 'video' or not hasattr(item, 'filename') or utils.do_thumbnail(item.filename)):
                t.create(t.PRIORITY_LOW)
        #
        # Database code
        #
        # add kaa.metadata results, the db module will add everything known
        # to the db. After that add or update the database.
        #
        if item._beacon_id:
            # Update old db entry
            db.update_object(item._beacon_id, **attributes)
            item._beacon_data.update(attributes)
        else:
            # Create new entry
            obj = db.add_object(type, name=item._beacon_data['name'], parent=parent, overlay=item._beacon_overlay, **attributes)
            item._beacon_database_update(obj)
        #
        # Additional track handling
        #
        if hasattr(metadata, 'tracks'):
            # The item has tracks, e.g. a dvd image on hd.
            if not metadata.get('type'):
                log.error('%s metadata has no type', item)
                # NOTE(review): presumably yielding a plain value ends the
                # kaa coroutine here (acts as return) -- confirm
                yield produced_load
            # delete all known tracks before adding new
            result = yield db.query(parent=item)
            for track in result:
                db.delete_object(track)
            if not 'track_%s' % metadata.get('type').lower() in \
               db.list_object_types():
                key = metadata.get('type').lower()
                log.error('track_%s not in database keys', key)
                yield produced_load
            type = 'track_%s' % metadata.get('type').lower()
            for track in metadata.tracks:
                db.add_object(type, name=str(track.trackno), parent=item, metadata=track)
        # parsing done
        log.info('scan %s (%0.3f)' % (item, time.time() - t1))
    except GeneratorExit:
        # Don't catch this, otherwise if the coroutine is aborted you get
        # "_parse() ignored GeneratorExit"
        raise
    # NOTE(review): Python-2-only except syntax; 'e' is unused
    except Exception, e:
        log.exception('parser error: %s', item)
def _parse(db, item, mtime):
    """
    Parse the item, this can take a while.

    Coroutine. Runs kaa.metadata over the item's file, derives the db
    object type (dir/file/image/video/disc...), gathers image/poster/
    thumbnail attributes, updates or creates the db entry (guarding
    against a parallel parse of the same item), and finally handles
    per-track children for items that have tracks (e.g. a dvd image).
    Yields 'produced_load' values to report how expensive the step was.
    """
    produced_load = 0
    try:
        #
        # Parent checking
        #
        parent = item._beacon_parent
        if not parent._beacon_id:
            # There is a parent without id, update the parent now.
            r = parse(db, parent)
            if isinstance(r, kaa.InProgress):
                yield r
            if not parent._beacon_id:
                # This should never happen
                raise AttributeError('parent for %s has no dbid' % item)
            # we had no parent id which we have now. Restart the whole
            # parsing process. maye this item was in the db already
            r = parse(db, parent)
            if isinstance(r, kaa.InProgress):
                r = yield r
            yield r
        #
        # Metadata parsing
        #
        t1 = time.time()
        # FIXME: add force parameter from config file:
        # - always force (slow but best result)
        # - never force (faster but maybe wrong)
        # - only force on media 1 (good default)
        # Parse metadata in an extra named thread
        metadata = yield parse_thread(item.filename)
        if not metadata:
            metadata = {}
        attributes = {'mtime': mtime, 'image': metadata.get('image')}
        # derive the db object type from the parsed metadata
        if metadata.get('media') == kaa.metadata.MEDIA_DISC and \
           metadata.get('subtype') in db.list_object_types():
            type = metadata['subtype']
            if metadata.get('type'):
                attributes['scheme'] = metadata.get('type').lower()
            item._beacon_isdir = False
        elif media_types.get(metadata.get('media')) in db.list_object_types():
            type = media_types.get(metadata['media'])
        elif item._beacon_isdir:
            type = 'dir'
        else:
            type = 'file'
        if item._beacon_id and type != item._beacon_id[0]:
            # The item changed its type. Adjust the db
            yield kaa.inprogress(db.read_lock)
            data = db.update_object_type(item._beacon_id, type)
            if not data:
                log.info('item to change not in the db anymore')
            log.info('change item %s to %s' % (item._beacon_id, type))
            item._beacon_database_update(data)
        #
        # Thumbnail / Cover / Image stuff.
        #
        produced_load = 1
        if type == 'dir':
            # If the image was detected by the parser, do not override
            # it in add_directory_attributes
            attributes['image_from_parser'] = bool(attributes.get('image'))
        elif type == 'image':
            attributes['image'] = item.filename
            if metadata.get('thumbnail'):
                t = thumbnail.Thumbnail(item.filename, item._beacon_media)
                if t.needs_update:
                    # only store the normal version
                    try:
                        produced_load = 2
                        t.normal = kaa.imlib2.open_from_memory(
                            metadata.get('thumbnail'))
                    except (ValueError, IOError):
                        log.exception('image thumbnail')
        else:
            base = os.path.splitext(item.filename)[0]
            if type == 'video' and not attributes.get(
                    'image') and thumbnail.SUPPORT_VIDEO:
                attributes['image'] = item.filename
            if metadata.get('thumbnail') and not attributes.get('image'):
                # use the embedded raw thumbnail from the metadata
                attributes['image'] = item.filename
                t = thumbnail.Thumbnail(item.filename, item._beacon_media)
                try:
                    produced_load = 2
                    t.image = kaa.imlib2.open_from_memory(
                        metadata['thumbnail'])
                except (ValueError, IOError):
                    log.exception('raw thumbnail')
            # sidecar images next to the file; videos store them as
            # 'poster', everything else as 'image'
            for ext in ('.jpg', '.png'):
                if os.path.isfile(base + ext):
                    if type == 'video':
                        attributes['poster'] = base + ext
                    else:
                        attributes['image'] = base + ext
                    break
                if os.path.isfile(item.filename + ext):
                    if type == 'video':
                        attributes['poster'] = item.filename + ext
                    else:
                        attributes['image'] = item.filename + ext
                    break
        #
        # Type specific attributes
        #
        if type == 'video':
            # Normally db.add_object() will take care of assigning type
            # attributes from metadata, but some attributes for videos
            # aren't at the top-level attribute object. For video
            # dimensions, take the dimensions of the first video track
            # (of the longest title, if applicable).
            video = None
            if metadata.get('video'):
                video = metadata.video[0]
            elif metadata.get('tracks'):
                # Find the longest title with a video track.
                for title in sorted(metadata.tracks, key=lambda t: -t.length):
                    if title.get('video'):
                        video = title.video[0]
                        break
            if video:
                attributes['width'] = video.get('width')
                attributes['height'] = video.get('height')
            attributes['series'] = metadata.series
            attributes['season'] = metadata.season
            attributes['episode'] = metadata.episode
            attributes['metadata'] = metadata
        # now call extention plugins
        ext = os.path.splitext(item.filename)[1]
        for function in extention_plugins.get(ext, []) + extention_plugins.get(
                None, []):
            function(item, attributes, type)
        yield kaa.inprogress(db.read_lock)
        if attributes.get('image'):
            # create thumbnail
            t = thumbnail.Thumbnail(attributes.get('image'), item._beacon_media)
            if t.needs_update and (not type == 'video' or not hasattr(item, 'filename') or utils.do_thumbnail(item.filename)):
                t.create(t.PRIORITY_LOW)
        #
        # Database code
        #
        # add kaa.metadata results, the db module will add everything known
        # to the db. After that add or update the database.
        #
        if item._beacon_id:
            # Update old db entry
            db.update_object(item._beacon_id, **attributes)
            item._beacon_data.update(attributes)
        else:
            # check if for some reasons the same item was parsed
            # parallel. If so, do not add it again and reuse the id
            entry = db._db.query(parent=parent._beacon_id, name=item._beacon_data['name'])
            if entry:
                # Update old db entry
                log.error('item already in db, re-use beacon_id')
                db.update_object((entry[0]['type'], entry[0]['id']), **attributes)
                obj = db._db.query(parent=parent._beacon_id, name=item._beacon_data['name'])[0]
            else:
                # Create new entry
                obj = db.add_object(type, name=item._beacon_data['name'], parent=parent, **attributes)
            item._beacon_database_update(obj)
        #
        # Additional track handling
        #
        if hasattr(metadata, 'tracks'):
            # The item has tracks, e.g. a dvd image on hd.
            if not metadata.get('type'):
                log.error('%s metadata has no type', item)
                # NOTE(review): presumably yielding a plain value ends the
                # kaa coroutine here (acts as return) -- confirm
                yield produced_load
            # delete all known tracks before adding new
            result = yield db.query(parent=item)
            for track in result:
                db.delete_object(track)
            if not 'track_%s' % metadata.get('type').lower() in \
               db.list_object_types():
                key = metadata.get('type').lower()
                log.error('track_%s not in database keys', key)
                yield produced_load
            type = 'track_%s' % metadata.get('type').lower()
            for track in metadata.tracks:
                db.add_object(type, name=str(track.trackno), parent=item, metadata=track)
        # parsing done
        log.info('scan %s (%0.3f)' % (item, time.time() - t1))
    except GeneratorExit:
        # Don't catch this, otherwise if the coroutine is aborted you get
        # "_parse() ignored GeneratorExit"
        raise
    # NOTE(review): Python-2-only except syntax; 'e' is unused
    except Exception, e:
        log.exception('parser error: %s', item)
def step(self):
    """
    Process one job.

    Returns False when there is nothing to do (queue empty or shutdown),
    True after handling one job (download, skip, or thumbnail creation).
    """
    if not self.jobs or kaa.main.is_shutting_down():
        return False
    job = self.jobs.pop(0)
    if job.url and not os.path.isfile(job.filename):
        # we need to download first
        self.download(job)
        self.schedule_next(fast=True)
        return True
    for size in ('large', 'normal'):
        # iterate over the sizes
        imagefile = job.imagefile % size
        if not os.path.isfile(imagefile):
            break
        metadata = kaa.metadata.parse(imagefile)
        if not metadata:
            break
        mtime = metadata.get('Thumb::MTime')
        try:
            if not mtime or mtime != str(
                    os.stat(job.filename)[stat.ST_MTIME]):
                # needs an update
                break
        except (IOError, OSError):
            log.exception('os.stat')
            break
    else:
        # we did not break out of the loop, this means we have both thumbnails
        # and the mtime is also correct. Refuse the recreate thumbnail
        self.notify_client(job)
        self.schedule_next(fast=True)
        return True
    log.info('create thumbnail for %s -> %s', job.filename, job.imagefile)
    if job.filename.lower().endswith('jpg'):
        # try epeg for fast thumbnailing
        try:
            # small jpgs are probably not photos; let imlib2 handle them
            if os.stat(job.filename)[stat.ST_SIZE] < 1024 * 1024:
                raise ValueError('no photo, use imlib2')
            libthumb.epeg(job.filename, job.imagefile % 'large', (256, 256))
            libthumb.epeg(job.filename, job.imagefile % 'normal', (128, 128))
            self.notify_client(job)
            self.schedule_next()
            return True
        except (IOError, OSError, ValueError):
            pass
    try:
        # try normal imlib2 thumbnailing
        libthumb.png(job.filename, job.imagefile % 'large', (256, 256))
        libthumb.png(job.filename, job.imagefile % 'normal', (128, 128))
        self.notify_client(job)
        self.schedule_next()
        return True
    # NOTE(review): Python-2-only except syntax; 'e' is unused
    except (IOError, OSError, ValueError), e:
        pass