def update(self, metadata, media, lang):
    """Artist-level update: collect track/artist video extras and set the
    artist sort title from the first track's audio tags."""
    # Clear the title so stale data never clobbers other agents' contributions.
    metadata.title = None

    if not shouldFindExtras():
        return

    type_map = getExtraTypeMap()
    extras_by_file = {}
    track_dirs = set()
    sort_title_checked = False

    # First look for track extras, collecting each track's directory as we go.
    for album in media.children:
        for track in album.children:
            track_path = helpers.unicodize(track.items[0].parts[0].file)
            findTrackExtra(track_path, type_map, extras_by_file)
            track_dirs.add(os.path.dirname(track_path))

            # Read the artist sort tag from the first track only.
            if not sort_title_checked:
                sort_title_checked = True
                helper = audiohelpers.AudioHelpers(track_path)
                if helper and hasattr(helper, 'get_artist_sort_title'):
                    sort_title = helper.get_artist_sort_title()
                    if sort_title and hasattr(metadata, 'title_sort'):
                        metadata.title_sort = sort_title

    # Now go through this artist's distinct directories looking for additional extras.
    for track_dir in track_dirs:
        findArtistExtras(helpers.unicodize(track_dir), type_map, extras_by_file, media.title)

    for extra in sorted(extras_by_file.values(),
                        key=lambda e: (getExtraSortOrder()[type(e)], e.title)):
        metadata.extras.add(extra)
def updateAlbum(metadata, media, lang, find_extras=False, artist_extras=None, extra_type_map=None):
    """Album-level update: local poster art, embedded audio-tag metadata
    (album/track sort titles) and, optionally, per-track video extras.

    Any extras found are also recorded into artist_extras (keyed by file name)
    when the caller passes a dict in to accumulate them across albums.
    """
    # Fixed: the default used to be a shared mutable dict ({}) that leaked
    # entries between calls; create a fresh one per call instead.
    if artist_extras is None:
        artist_extras = {}

    # Clear out the title to ensure stale data doesn't clobber other agents' contributions.
    metadata.title = None

    # NOTE(review): valid_posters is accumulated but never validated here,
    # unlike sibling agents that call metadata.posters.validate_keys -- confirm
    # whether that is intentional for this variant.
    valid_posters = []
    path = None
    for track in media.tracks:
        for item in media.tracks[track].items:
            for part in item.parts:
                filename = helpers.unicodize(part.file)
                path = os.path.dirname(filename)
                (file_root, fext) = os.path.splitext(filename)

                # Lower-cased directory listing for case-insensitive matching.
                path_files = {}
                for p in os.listdir(path):
                    path_files[p.lower()] = p

                # Look for posters: configured names, the track's own basename,
                # and the containing folder's name.
                poster_files = config.POSTER_FILES + [os.path.basename(file_root), helpers.splitPath(path)[-1]]
                for ext in config.ART_EXTS:
                    for name in poster_files:
                        file = (name + '.' + ext).lower()
                        if file in path_files.keys():
                            data = Core.storage.load(os.path.join(path, path_files[file]))
                            poster_name = hashlib.md5(data).hexdigest()
                            valid_posters.append(poster_name)
                            if poster_name not in metadata.posters:
                                metadata.posters[poster_name] = Proxy.Media(data)
                                Log('Local asset image added: ' + file + ', for file: ' + filename)
                            else:
                                Log('Skipping local poster since its already added')

                # If there is an appropriate AudioHelper, use it.
                audio_helper = audiohelpers.AudioHelpers(part.file)
                if audio_helper != None:
                    try:
                        valid_posters = valid_posters + audio_helper.process_metadata(metadata)

                        # Album sort title.
                        if hasattr(audio_helper, 'get_album_sort_title'):
                            album_sort_title = audio_helper.get_album_sort_title()
                            if album_sort_title and hasattr(metadata, 'title_sort'):
                                metadata.title_sort = album_sort_title

                        # Track sort title.
                        if hasattr(audio_helper, 'get_track_sort_title'):
                            track_sort_title = audio_helper.get_track_sort_title()
                            track_key = media.tracks[track].guid or track
                            if track_sort_title and hasattr(metadata.tracks[track_key], 'title_sort'):
                                metadata.tracks[track_key].title_sort = track_sort_title
                    except Exception:
                        # Tag reading is best-effort; was a bare 'except: pass'.
                        # Narrowed so SystemExit/KeyboardInterrupt propagate, and
                        # logged so failures are no longer silent.
                        Log('Error processing audio tags for %s' % filename)

                # Look for a video extra for this track.
                if find_extras:
                    track_video = findTrackExtra(helpers.unicodize(part.file), extra_type_map)
                    if track_video is not None:
                        track_key = media.tracks[track].guid or track
                        metadata.tracks[track_key].extras.add(track_video)
def AudioHelpers(filename):
    # Probe the file with Mutagen to confirm it is readable audio.
    # NOTE(review): on success nothing is explicitly returned and 'tag' is
    # never used, so callers always receive None from this variant; a fuller
    # AudioHelpers factory exists elsewhere in this file -- confirm whether
    # this copy is truncated/dead code.
    filename = helpers.unicodize(filename)
    try:
        tag = MFile(filename, None, True)
    except Exception, e:
        # Unreadable / unparsable file: log and bail out.
        Log('Error getting file details for %s: %s' % (filename, e))
        return None
def SubtitleHelpers(filename):
    """Return a SubtitleHelper for filename, or None when the file yields
    no parsable subtitle info."""
    filename = helpers.unicodize(filename)
    subtitle = SubtitleHelper(filename)
    # Fixed: the success path used to construct SubtitleHelper(filename) a
    # second time, parsing the same file twice; reuse the instance we have.
    if subtitle.info != None:
        return subtitle
    return None
def parseArtistExtra(path, extra_type_map, artist_name):
    """Build an artist extra from a video file named like
    "Artist - Pretty Name - type.ext" (artist and type parts optional)."""
    stem = os.path.splitext(os.path.basename(path))[0]
    parts = stem.split('-')

    # A trailing component naming a known type selects the extra class;
    # otherwise default to a plain music video.
    extra_cls = MusicVideoObject
    if len(parts) > 1:
        type_key = parts[-1].lower().strip()
        if type_key in extra_type_map:
            parts.pop(-1)
            extra_cls = extra_type_map[type_key]

    # Only return concerts if we're new enough.
    if extra_cls in [ConcertVideoObject] and not Util.VersionAtLeast(Platform.ServerVersion, 0, 9, 12, 2):
        Log('Found concert, but skipping, not new enough server.')
        return None

    # Drop a leading artist-name component when something else remains.
    if len(parts) > 1 and normalizeArtist(parts[0]) == artist_name:
        parts.pop(0)

    return extra_cls(title='-'.join(parts), file=helpers.unicodize(path))
def AudioHelpers(filename):
    """Return an audio helper instance for filename's extension, or None."""
    filename = helpers.unicodize(filename)
    extension = os.path.splitext(os.path.basename(filename))[1]
    helper_classes = [MP3AudioHelper, MP4AudioHelper, FLACAudioHelper, OGGAudioHelper]
    # First class claiming the extension wins.
    for helper_cls in helper_classes:
        if helper_cls.is_helper_for(extension):
            return helper_cls(filename)
    return None
def VideoHelpers(filename):
    """Return a video helper instance for filename, or None if unsupported."""
    filename = helpers.unicodize(filename)
    extension = os.path.splitext(os.path.basename(filename))[1]
    # MP4 is currently the only supported container.
    if MP4VideoHelper.is_helper_for(extension):
        return MP4VideoHelper(filename)
    return None
def VideoHelpers(filename):
    """Pick the helper class able to handle this video file's extension."""
    filename = helpers.unicodize(filename)
    base = os.path.basename(filename)
    extension = os.path.splitext(base)[1]
    matches = [c for c in [MP4VideoHelper] if c.is_helper_for(extension)]
    return matches[0](filename) if matches else None
def subtitle_helpers(filename):
    """Return the first subtitle helper that can handle filename, or None.

    VobSub is tried before the default helper, but only when the
    exotic-extension scan preference is enabled.
    """
    filename = helpers.unicodize(filename)
    if helpers.cast_bool(Prefs["subtitles.scan.exotic_ext"]):
        candidates = [VobSubSubtitleHelper, DefaultSubtitleHelper]
    else:
        candidates = [DefaultSubtitleHelper]
    for candidate in candidates:
        if candidate.is_helper_for(filename):
            return candidate(filename)
    return None
def findTrackExtra(file_path, extra_type_map, artist_extras=None):
    """Look for music videos for this track of the format:
    "track file name - pretty name (optional) - type (optional).ext"

    Returns the best track-attachable music video found (or None).  Every
    extra found is also recorded in artist_extras keyed by video file name.
    """
    # Fixed: the default used to be a shared mutable dict ({}) that persisted
    # across calls; create a fresh one unless the caller supplies an
    # accumulator of their own.
    if artist_extras is None:
        artist_extras = {}

    file_name = os.path.basename(file_path)
    file_root, file_ext = os.path.splitext(file_name)
    track_videos = []

    # Candidate videos: same directory, video extension, name starts with the
    # track's own basename.
    for video in [f for f in os.listdir(os.path.dirname(file_path))
                  if os.path.splitext(f)[1][1:].lower() in config.VIDEO_EXTS
                  and helpers.unicodize(f).lower().startswith(file_root.lower())]:
        video_file, ext = os.path.splitext(video)
        name_components = video_file.split('-')

        # A trailing "type" component selects a specific extra class.
        extra_type = MusicVideoObject
        if len(name_components) > 1:
            type_component = re.sub(r'[ ._]+', '', name_components[-1].lower())
            if type_component in extra_type_map:
                extra_type = extra_type_map[type_component]
                name_components.pop(-1)

        # Use the video file name for the title unless we have a prettier one.
        pretty_title = '-'.join(name_components).strip()
        if len(pretty_title) - len(file_root) > 0:
            pretty_title = pretty_title.replace(file_root, '')
            if pretty_title.startswith(file_ext):
                pretty_title = pretty_title[len(file_ext):]
            pretty_title = re.sub(r'^[- ]+', '', pretty_title)

        track_video = extra_type(title=pretty_title, file=os.path.join(os.path.dirname(file_path), video))
        artist_extras[video] = track_video

        # Only plain/lyric music videos may be attached directly to a track.
        if extra_type in [MusicVideoObject, LyricMusicVideoObject]:
            Log('Found video %s for track: %s from file: %s' % (pretty_title, file_name, os.path.join(os.path.dirname(file_path), video)))
            track_videos.append(track_video)
        else:
            Log('Skipping track video %s (only regular music videos allowed on tracks)' % video)

    if len(track_videos) > 0:
        track_videos = sorted(track_videos, key=lambda v: (getExtraSortOrder()[type(v)], v.title))
        return track_videos[0]
    else:
        return None
def normalizeArtist(artist_name):
    """Normalize an artist name for comparison: drop Unicode punctuation
    and spaces, lower-case.  Falls back to the raw name when nothing
    survives the stripping or when normalization fails."""
    try:
        u_name = helpers.unicodize(artist_name)
        # Keep every character whose Unicode category is not punctuation (P*).
        cleaned = ''.join(ch for ch in u_name
                          if not unicodedata.category(ch).startswith('P'))
        cleaned = cleaned.replace(' ', '').lower()
        return cleaned if len(cleaned) > 0 else artist_name
    except Exception as e:
        Log('Error normalizing artist: %s' % e)
        return artist_name
def AudioHelpers(filename):
    """Open filename with Mutagen and return the matching audio helper.

    Returns None for an empty filename, an unreadable file, or a tag type
    no helper class recognizes.
    """
    # Fixed: an empty filename used to skip the whole 'if' body and then hit
    # 'if tag is not None' with 'tag' never assigned, raising NameError.
    if len(filename) == 0:
        return None

    filename = helpers.unicodize(filename)
    try:
        tag = MFile(filename, None, True)
    except Exception as e:
        Log('Error getting file details for %s: %s' % (filename, e))
        return None

    if tag is not None:
        # Dispatch on the concrete Mutagen tag class name.
        for cls in [ID3AudioHelper, MP4AudioHelper, FLACAudioHelper, OGGAudioHelper, ASFAudioHelper]:
            if cls.is_helper_for(type(tag).__name__):
                return cls(filename)
    return None
def update(self, metadata, media, lang):
    """Agent update: pick up local poster art and embedded audio-tag metadata,
    then drop posters whose backing local asset has disappeared."""
    # Set title if needed.
    if media and metadata.title is None:
        metadata.title = media.title

    valid_posters = []
    for track in media.tracks:
        for item in media.tracks[track].items:
            for part in item.parts:
                filename = helpers.unicodize(part.file)
                path = os.path.dirname(filename)
                (file_root, fext) = os.path.splitext(filename)

                # Lower-cased directory listing for case-insensitive matching.
                path_files = {}
                for p in os.listdir(path):
                    path_files[p.lower()] = p

                # Look for posters
                poster_files = config.POSTER_FILES + [os.path.basename(file_root), helpers.splitPath(path)[-1]]
                for ext in config.ART_EXTS:
                    for name in poster_files:
                        file = (name + '.' + ext).lower()
                        if file in path_files.keys():
                            data = Core.storage.load(os.path.join(path, path_files[file]))
                            poster_name = hashlib.md5(data).hexdigest()
                            valid_posters.append(poster_name)
                            if poster_name not in metadata.posters:
                                metadata.posters[poster_name] = Proxy.Media(data)
                                Log('Local asset image added: ' + file + ', for file: ' + filename)
                            else:
                                Log('Skipping local poster since its already added')

                # If there is an appropriate AudioHelper, use it.
                audio_helper = audiohelpers.AudioHelpers(part.file)
                if audio_helper != None:
                    try:
                        valid_posters = valid_posters + audio_helper.process_metadata(metadata)
                    except Exception:
                        # Fixed: previously unguarded, so one unreadable file's
                        # tags aborted the entire update (including the
                        # validate_keys cleanup below).  Matches the guarded
                        # sibling agent in this file; logged instead of silent.
                        Log('Error processing audio metadata for %s' % filename)

    # Drop posters that no longer have a backing local asset.
    metadata.posters.validate_keys(valid_posters)
def findTrackExtra(file_path, extra_type_map, artist_extras=None):
    """Scan the track's directory for music videos named
    "track file name - pretty name (optional) - type (optional).ext".

    Returns the top-sorted track-attachable video or None; all discovered
    extras are additionally stored in artist_extras under their file name.
    """
    # Fixed mutable-default pitfall: a literal {} default is shared between
    # every call that omits the argument, leaking extras across tracks.
    if artist_extras is None:
        artist_extras = {}

    file_name = os.path.basename(file_path)
    file_root, file_ext = os.path.splitext(file_name)
    track_videos = []

    # Videos in the same directory whose name begins with the track basename.
    for video in [f for f in os.listdir(os.path.dirname(file_path))
                  if os.path.splitext(f)[1][1:].lower() in config.VIDEO_EXTS
                  and helpers.unicodize(f).lower().startswith(file_root.lower())]:
        video_file, ext = os.path.splitext(video)
        name_components = video_file.split('-')

        # Optional trailing type component picks the extra class.
        extra_type = MusicVideoObject
        if len(name_components) > 1:
            type_component = re.sub(r'[ ._]+', '', name_components[-1].lower())
            if type_component in extra_type_map:
                extra_type = extra_type_map[type_component]
                name_components.pop(-1)

        # Use the video file name for the title unless we have a prettier one.
        pretty_title = '-'.join(name_components).strip()
        if len(pretty_title) - len(file_root) > 0:
            pretty_title = pretty_title.replace(file_root, '')
            if pretty_title.startswith(file_ext):
                pretty_title = pretty_title[len(file_ext):]
            pretty_title = re.sub(r'^[- ]+', '', pretty_title)

        track_video = extra_type(title=pretty_title, file=os.path.join(os.path.dirname(file_path), video))
        artist_extras[video] = track_video

        # Only regular/lyric music videos may be attached to the track itself.
        if extra_type in [MusicVideoObject, LyricMusicVideoObject]:
            Log('Found video %s for track: %s from file: %s' % (pretty_title, file_name, os.path.join(os.path.dirname(file_path), video)))
            track_videos.append(track_video)
        else:
            Log('Skipping track video %s (only regular music videos allowed on tracks)' % video)

    if len(track_videos) > 0:
        track_videos = sorted(track_videos, key=lambda v: (getExtraSortOrder()[type(v)], v.title))
        return track_videos[0]
    else:
        return None
def parseArtistExtra(path, extra_type_map, artist_name):
    """Turn a loose video file into an artist extra object.

    File names look like "Artist - Pretty Name - type.ext"; artist and type
    components are both optional.
    """
    base_name = os.path.basename(path)
    name = os.path.splitext(base_name)[0]
    pieces = name.split('-')

    # Trailing component naming a known type → specific extra class.
    extra_type = MusicVideoObject
    if len(pieces) > 1:
        last = pieces[-1].lower().strip()
        if last in extra_type_map:
            extra_type = extra_type_map[last]
            pieces = pieces[:-1]

    # Concert extras only work on servers >= 0.9.12.2.
    if extra_type in [ConcertVideoObject] and not Util.VersionAtLeast(Platform.ServerVersion, 0, 9, 12, 2):
        Log('Found concert, but skipping, not new enough server.')
        return None

    # Strip a leading artist component when other components remain.
    if len(pieces) > 1 and normalizeArtist(pieces[0]) == artist_name:
        pieces = pieces[1:]

    return extra_type(title='-'.join(pieces), file=helpers.unicodize(path))
def update(self, metadata, media, lang):
    """Agent update: local poster art plus embedded audio-tag metadata,
    followed by poster-key validation."""
    # Set title if needed.
    if media and metadata.title is None:
        metadata.title = media.title

    valid_posters = []
    for track in media.tracks:
        for item in media.tracks[track].items:
            for part in item.parts:
                filename = helpers.unicodize(part.file)
                path = os.path.dirname(filename)
                (file_root, fext) = os.path.splitext(filename)

                # Case-insensitive lookup of the directory contents.
                path_files = {}
                for p in os.listdir(path):
                    path_files[p.lower()] = p

                # Look for posters
                poster_files = config.POSTER_FILES + [os.path.basename(file_root), helpers.splitPath(path)[-1]]
                for ext in config.ART_EXTS:
                    for name in poster_files:
                        file = (name + '.' + ext).lower()
                        if file in path_files.keys():
                            data = Core.storage.load(os.path.join(path, path_files[file]))
                            poster_name = hashlib.md5(data).hexdigest()
                            valid_posters.append(poster_name)
                            if poster_name not in metadata.posters:
                                metadata.posters[poster_name] = Proxy.Media(data)
                                Log('Local asset image added: ' + file + ', for file: ' + filename)
                            else:
                                Log('Skipping local poster since its already added')

                # If there is an appropriate AudioHelper, use it.
                audio_helper = audiohelpers.AudioHelpers(part.file)
                if audio_helper != None:
                    try:
                        valid_posters = valid_posters + audio_helper.process_metadata(metadata)
                    except Exception:
                        # Fixed: was a bare 'except: pass', which also swallowed
                        # SystemExit/KeyboardInterrupt and hid every tag failure.
                        # Still best-effort, but narrowed and logged.
                        Log('Error processing audio metadata for %s' % filename)

    # Drop posters whose backing local asset has disappeared.
    metadata.posters.validate_keys(valid_posters)
def SubtitleHelpers(filename):
    """Return a helper able to process this subtitle file, or None."""
    filename = helpers.unicodize(filename)
    chosen = None
    # VobSub gets first refusal, then the default helper.
    for candidate in (VobSubSubtitleHelper, DefaultSubtitleHelper):
        if candidate.is_helper_for(filename):
            chosen = candidate
            break
    return chosen(filename) if chosen is not None else None
def find_subtitles(part, ignore_parts_cleanup=None):
    """Locate local subtitle files for a media part and sync them onto it.

    Scans the part's directory, an optional configured sub-subfolder, an
    optional custom folder (absolute or relative) and the global Subtitles
    folder; optionally housekeeps leftover subtitle files whose media is
    gone; then registers every matching subtitle and removes stale
    languages/subtitles from part.subtitles.

    ignore_parts_cleanup -- truthy to skip the autoclean housekeeping pass.
    """
    lang_sub_map = {}
    ignore_parts_cleanup = ignore_parts_cleanup or []
    part_filename = helpers.unicodize(part.file)
    part_basename = os.path.splitext(os.path.basename(part_filename))[0]
    use_filesystem = helpers.cast_bool(Prefs["subtitles.save.filesystem"])
    sub_dir_custom = Prefs["subtitles.save.subFolder.Custom"].strip() \
        if Prefs["subtitles.save.subFolder.Custom"] else None
    # A custom folder overrides the named sub-subfolder preference.
    use_sub_subfolder = Prefs["subtitles.save.subFolder"] != "current folder" and not sub_dir_custom
    autoclean = helpers.cast_bool(Prefs["subtitles.autoclean"])
    sub_subfolder = None

    # Filesystem scanning disabled => no paths to walk at all.
    paths = [os.path.dirname(part_filename)] if use_filesystem else []
    # Folders shared across the whole library; matched only on exact names
    # and excluded from housekeeping.
    global_folders = []

    if use_filesystem:
        # Check for local subtitles subdirectory
        sub_dir_base = paths[0]
        sub_dir_list = []
        if use_sub_subfolder:
            # got selected subfolder
            sub_subfolder = os.path.join(sub_dir_base, Prefs["subtitles.save.subFolder"])
            sub_dir_list.append(sub_subfolder)
            sub_subfolder = os.path.normpath(helpers.unicodize(sub_subfolder))

        if sub_dir_custom:
            # got custom subfolder
            sub_dir_custom = os.path.normpath(sub_dir_custom)
            if os.path.isdir(sub_dir_custom) and os.path.isabs(sub_dir_custom):
                # absolute folder: treated as a global (library-wide) folder
                sub_dir_list.append(sub_dir_custom)
                global_folders.append(sub_dir_custom)
            else:
                # relative folder
                fld = os.path.join(sub_dir_base, sub_dir_custom)
                sub_dir_list.append(fld)

        for sub_dir in sub_dir_list:
            if os.path.isdir(sub_dir):
                paths.append(sub_dir)

        # Check for a global subtitle location
        global_subtitle_folder = os.path.join(Core.app_support_path, 'Subtitles')
        if os.path.exists(global_subtitle_folder):
            paths.append(global_subtitle_folder)
            global_folders.append(global_subtitle_folder)

    # normalize all paths
    paths = [os.path.normpath(helpers.unicodize(path)) for path in paths]

    # We start by building a dictionary of files to their absolute paths. We also need to know
    # the number of media files that are actually present, in case the found local media asset
    # is limited to a single instance per media file.
    #
    file_paths = {}
    total_media_files = 0
    media_files = []
    for path in paths:
        # Listing via the configured filesystem encoding (bytes path).
        for file_path_listing in os.listdir(path.encode(sz_config.fs_encoding)):
            # When using os.listdir with a unicode path, it will always return a string using the
            # NFD form. However, we internally are using the form NFC and therefore need to convert
            # it to allow correct regex / comparisons to be performed.
            #
            file_path_listing = helpers.unicodize(file_path_listing)
            if os.path.isfile(os.path.join(path, file_path_listing).encode(sz_config.fs_encoding)):
                file_paths[file_path_listing.lower()] = os.path.join(path, file_path_listing)

            # If we've found an actual media file, we should record it.
            (root, ext) = os.path.splitext(file_path_listing)
            if ext.lower()[1:] in config.VIDEO_EXTS:
                total_media_files += 1
                # collect found media files
                media_files.append(root)

    # cleanup any leftover subtitle if no associated media file was found
    if autoclean and ignore_parts_cleanup:
        Log.Info("Skipping housekeeping of: %s", paths)
    if use_filesystem and autoclean and not ignore_parts_cleanup:
        for path in paths:
            # only housekeep in sub_subfolder if sub_subfolder is used
            if use_sub_subfolder and path != sub_subfolder and not sz_config.advanced.thorough_cleaning:
                continue
            # we can't housekeep the global subtitle folders as we don't know about *all* media files
            # in a library; skip them
            skip_path = False
            for fld in global_folders:
                if path.startswith(fld):
                    Log.Info("Skipping housekeeping of folder: %s", path)
                    skip_path = True
                    break
            if skip_path:
                continue
            for file_path_listing in os.listdir(path.encode(sz_config.fs_encoding)):
                file_path_listing = helpers.unicodize(file_path_listing)
                enc_fn = os.path.join(path, file_path_listing).encode(sz_config.fs_encoding)
                if os.path.isfile(enc_fn):
                    (root, ext) = os.path.splitext(file_path_listing)
                    # it's a subtitle file
                    if ext.lower()[1:] in config.SUBTITLE_EXTS_BASE:
                        # get fn without forced/default/normal tag
                        split_tag = root.rsplit(".", 1)
                        if len(split_tag) > 1 and split_tag[1].lower() in SECONDARY_TAGS:
                            root = split_tag[0]
                        # get associated media file name without language
                        sub_fn = subtitlehelpers.ENDSWITH_LANGUAGECODE_RE.sub("", root)
                        # subtitle basename and basename without possible language tag not found in collected
                        # media files? kill.
                        if root not in media_files and sub_fn not in media_files:
                            Log.Info("Removing leftover subtitle: %s", os.path.join(path, file_path_listing))
                            try:
                                os.remove(enc_fn)
                            except (OSError, IOError):
                                Log.Error("Removing failed")

    Log('Looking for subtitle media in %d paths with %d media files.', len(paths), total_media_files)
    Log('Paths: %s', ", ".join([helpers.unicodize(p) for p in paths]))

    for file_path in file_paths.values():
        local_filename = os.path.basename(file_path)
        bn, ext = os.path.splitext(local_filename)
        local_basename = helpers.unicodize(bn)
        # get fn without forced/default/normal tag
        split_tag = local_basename.rsplit(".", 1)
        has_additional_tag = False
        if len(split_tag) > 1 and split_tag[1].lower() in SECONDARY_TAGS:
            local_basename = split_tag[0]
            has_additional_tag = True
        # split off possible language tag
        local_basename2 = local_basename.rsplit('.', 1)[0]
        filename_matches_part = local_basename == part_basename or local_basename2 == part_basename
        filename_contains_part = part_basename in local_basename
        # Only subtitle extensions from here on.
        if not ext.lower()[1:] in config.SUBTITLE_EXTS:
            continue
        # if the file is located within the global subtitle folders and its name doesn't match exactly, ignore it
        if global_folders and not filename_matches_part:
            skip_path = False
            for fld in global_folders:
                if file_path.startswith(fld):
                    skip_path = True
                    break
            if skip_path:
                continue
        # determine whether to pick up the subtitle based on our match strictness
        if not filename_matches_part:
            if sz_config.ext_match_strictness == "strict" or (sz_config.ext_match_strictness == "loose" and not filename_contains_part):
                # Log.Debug("%s doesn't match %s, skipping" % (helpers.unicodize(local_filename),
                #           helpers.unicodize(part_basename)))
                continue

        subtitle_helper = subtitlehelpers.subtitle_helpers(file_path)
        if subtitle_helper is not None:
            local_lang_map = subtitle_helper.process_subtitles(part)
            for new_language, subtitles in local_lang_map.items():
                # Add the possible new language along with the located subtitles so that we can validate them
                # at the end...
                #
                if not lang_sub_map.has_key(new_language):
                    lang_sub_map[new_language] = []
                lang_sub_map[new_language] = lang_sub_map[new_language] + subtitles

    # add known metadata subs to our sub list
    if not use_filesystem:
        for language, sub_list in subtitlehelpers.get_subtitles_from_metadata(part).iteritems():
            if sub_list:
                if language not in lang_sub_map:
                    lang_sub_map[language] = []
                lang_sub_map[language] = lang_sub_map[language] + sub_list

    # Now whack subtitles that don't exist anymore.
    for language in lang_sub_map.keys():
        part.subtitles[language].validate_keys(lang_sub_map[language])

    # Now whack the languages that don't exist anymore.
    for language in list(set(part.subtitles.keys()) - set(lang_sub_map.keys())):
        part.subtitles[language].validate_keys({})
def findSubtitles(part):
    """Find local subtitle files for a media part and register them on it.

    Looks in the part's own directory, an optional configured sub-subfolder
    (named or custom, absolute or relative) and the global Subtitles folder,
    then validates part.subtitles so stale entries are removed.
    """
    lang_sub_map = {}
    part_filename = helpers.unicodize(part.file)
    part_basename = os.path.splitext(os.path.basename(part_filename))[0]
    paths = [os.path.dirname(part_filename)]

    # Check for local subtitles subdirectory
    # NOTE(review): sub_dirs_default is never used below -- confirm whether the
    # default-names fallback was removed intentionally.
    sub_dirs_default = ["sub", "subs", "subtitle", "subtitles"]
    sub_dir_base = paths[0]
    sub_dir_list = []
    if Prefs["subtitles.save.subFolder"] != "current folder":
        # got selected subfolder
        sub_dir_list.append(os.path.join(sub_dir_base, Prefs["subtitles.save.subFolder"]))

    sub_dir_custom = Prefs["subtitles.save.subFolder.Custom"].strip() if bool(Prefs["subtitles.save.subFolder.Custom"]) else None
    if sub_dir_custom:
        # got custom subfolder
        if sub_dir_custom.startswith("/"):
            # absolute folder
            sub_dir_list.append(sub_dir_custom)
        else:
            # relative folder
            sub_dir_list.append(os.path.join(sub_dir_base, sub_dir_custom))

    # Only keep candidate folders that actually exist.
    for sub_dir in sub_dir_list:
        if os.path.isdir(sub_dir):
            paths.append(sub_dir)

    # Check for a global subtitle location
    global_subtitle_folder = os.path.join(Core.app_support_path, 'Subtitles')
    if os.path.exists(global_subtitle_folder):
        paths.append(global_subtitle_folder)

    # We start by building a dictionary of files to their absolute paths. We also need to know
    # the number of media files that are actually present, in case the found local media asset
    # is limited to a single instance per media file.
    #
    file_paths = {}
    total_media_files = 0
    for path in paths:
        path = helpers.unicodize(path)
        for file_path_listing in os.listdir(path):
            # When using os.listdir with a unicode path, it will always return a string using the
            # NFD form. However, we internally are using the form NFC and therefore need to convert
            # it to allow correct regex / comparisons to be performed.
            #
            file_path_listing = helpers.unicodize(file_path_listing)
            if os.path.isfile(os.path.join(path, file_path_listing)):
                file_paths[file_path_listing.lower()] = os.path.join(path, file_path_listing)

            # If we've found an actual media file, we should record it.
            (root, ext) = os.path.splitext(file_path_listing)
            if ext.lower()[1:] in config.VIDEO_EXTS:
                total_media_files += 1

    Log('Looking for subtitle media in %d paths with %d media files.', len(paths), total_media_files)
    Log('Paths: %s', ", ".join([helpers.unicodize(p) for p in paths]))

    for file_path in file_paths.values():
        local_basename = helpers.unicodize(os.path.splitext(os.path.basename(file_path))[0])
        # Basename with a possible trailing ".lang" tag stripped.
        local_basename2 = local_basename.rsplit('.', 1)[0]
        filename_matches_part = local_basename == part_basename or local_basename2 == part_basename

        # If the file is located within the global subtitle folder and it's name doesn't match exactly
        # then we should simply ignore it.
        #
        if file_path.count(global_subtitle_folder) and not filename_matches_part:
            continue

        # If we have more than one media file within the folder and located filename doesn't match
        # exactly then we should simply ignore it.
        #
        if total_media_files > 1 and not filename_matches_part:
            continue

        subtitle_helper = subtitlehelpers.SubtitleHelpers(file_path)
        if subtitle_helper != None:
            local_lang_map = subtitle_helper.process_subtitles(part)
            for new_language, subtitles in local_lang_map.items():
                # Add the possible new language along with the located subtitles so that we can validate them
                # at the end...
                #
                if not lang_sub_map.has_key(new_language):
                    lang_sub_map[new_language] = []
                lang_sub_map[new_language] = lang_sub_map[new_language] + subtitles

    # add known metadata subs to our sub list
    if not Prefs['subtitles.save.filesystem']:
        for language, sub_list in subtitlehelpers.getSubtitlesFromMetadata(part).iteritems():
            if sub_list:
                if not language in lang_sub_map:
                    lang_sub_map[language] = []
                lang_sub_map[language] = lang_sub_map[language] + sub_list

    # Now whack subtitles that don't exist anymore.
    for language in lang_sub_map.keys():
        part.subtitles[language].validate_keys(lang_sub_map[language])

    # Now whack the languages that don't exist anymore.
    for language in list(set(part.subtitles.keys()) - set(lang_sub_map.keys())):
        part.subtitles[language].validate_keys({})
def find_subtitles(part):
    """Find local subtitle files for a media part and register them on it.

    Variant that honours the save-to-filesystem preference: when disabled,
    no directories are scanned and only metadata-embedded subtitles are
    collected.  Finishes by validating part.subtitles to drop stale entries.
    """
    lang_sub_map = {}
    part_filename = helpers.unicodize(part.file)
    part_basename = os.path.splitext(os.path.basename(part_filename))[0]
    use_filesystem = bool(Prefs["subtitles.save.filesystem"])

    # No filesystem scanning when the preference is off.
    paths = [os.path.dirname(part_filename)] if use_filesystem else []
    global_subtitle_folder = None

    if use_filesystem:
        # Check for local subtitles subdirectory
        sub_dir_base = paths[0]
        sub_dir_list = []
        if Prefs["subtitles.save.subFolder"] != "current folder":
            # got selected subfolder
            sub_dir_list.append(os.path.join(sub_dir_base, Prefs["subtitles.save.subFolder"]))

        sub_dir_custom = Prefs["subtitles.save.subFolder.Custom"].strip() if bool(Prefs["subtitles.save.subFolder.Custom"]) else None
        if sub_dir_custom:
            # got custom subfolder
            if sub_dir_custom.startswith("/"):
                # absolute folder
                sub_dir_list.append(sub_dir_custom)
            else:
                # relative folder
                sub_dir_list.append(os.path.join(sub_dir_base, sub_dir_custom))

        for sub_dir in sub_dir_list:
            if os.path.isdir(sub_dir):
                paths.append(sub_dir)

        # Check for a global subtitle location
        global_subtitle_folder = os.path.join(Core.app_support_path, 'Subtitles')
        if os.path.exists(global_subtitle_folder):
            paths.append(global_subtitle_folder)

    # We start by building a dictionary of files to their absolute paths. We also need to know
    # the number of media files that are actually present, in case the found local media asset
    # is limited to a single instance per media file.
    #
    file_paths = {}
    total_media_files = 0
    for path in paths:
        path = helpers.unicodize(path)
        # Listing via the configured filesystem encoding (bytes path).
        for file_path_listing in os.listdir(path.encode(sz_config.fs_encoding)):
            # When using os.listdir with a unicode path, it will always return a string using the
            # NFD form. However, we internally are using the form NFC and therefore need to convert
            # it to allow correct regex / comparisons to be performed.
            #
            file_path_listing = helpers.unicodize(file_path_listing)
            if os.path.isfile(os.path.join(path, file_path_listing).encode(sz_config.fs_encoding)):
                file_paths[file_path_listing.lower()] = os.path.join(path, file_path_listing)

            # If we've found an actual media file, we should record it.
            (root, ext) = os.path.splitext(file_path_listing)
            if ext.lower()[1:] in config.VIDEO_EXTS:
                total_media_files += 1

    Log('Looking for subtitle media in %d paths with %d media files.', len(paths), total_media_files)
    Log('Paths: %s', ", ".join([helpers.unicodize(p) for p in paths]))

    for file_path in file_paths.values():
        local_basename = helpers.unicodize(os.path.splitext(os.path.basename(file_path))[0])
        # Basename with a possible trailing ".lang" tag stripped.
        local_basename2 = local_basename.rsplit('.', 1)[0]
        filename_matches_part = local_basename == part_basename or local_basename2 == part_basename

        # If the file is located within the global subtitle folder and it's name doesn't match exactly
        # then we should simply ignore it.
        #
        if global_subtitle_folder and file_path.count(global_subtitle_folder) and not filename_matches_part:
            continue

        # If we have more than one media file within the folder and located filename doesn't match
        # exactly then we should simply ignore it.
        #
        if total_media_files > 1 and not filename_matches_part:
            continue

        subtitle_helper = subtitlehelpers.subtitle_helpers(file_path)
        if subtitle_helper != None:
            local_lang_map = subtitle_helper.process_subtitles(part)
            for new_language, subtitles in local_lang_map.items():
                # Add the possible new language along with the located subtitles so that we can validate them
                # at the end...
                #
                if not lang_sub_map.has_key(new_language):
                    lang_sub_map[new_language] = []
                lang_sub_map[new_language] = lang_sub_map[new_language] + subtitles

    # add known metadata subs to our sub list
    if not use_filesystem:
        for language, sub_list in subtitlehelpers.get_subtitles_from_metadata(part).iteritems():
            if sub_list:
                if language not in lang_sub_map:
                    lang_sub_map[language] = []
                lang_sub_map[language] = lang_sub_map[language] + sub_list

    # Now whack subtitles that don't exist anymore.
    for language in lang_sub_map.keys():
        part.subtitles[language].validate_keys(lang_sub_map[language])

    # Now whack the languages that don't exist anymore.
    for language in list(set(part.subtitles.keys()) - set(lang_sub_map.keys())):
        part.subtitles[language].validate_keys({})
def findAudio(parts, conn):
    """Find sidecar audio-track files next to each media part and register
    them as audio streams directly in the Plex database.

    parts: iterable of media part objects (each exposing .file and .streams).
    conn:  open DB-API connection to the Plex library database (sqlite3-style
           '?' parameter substitution).

    Sidecar files must be named '<base>.<lang>' or '<base>.<lang>.<comment>'
    with an extension in config.AUDIO_TRACKS; ffprobe (from Prefs['ffmpeg_path'])
    is used to read their stream properties. Streams whose sidecar files have
    disappeared are deleted. Commits once at the end.
    """
    for part in parts:
        part_file = helpers.unicodize(part.file)
        dir_path = os.path.dirname(part_file)
        Log.Debug('Processing file %s', part_file)

        # Base name used to match sidecar files: the text before ' - ' when
        # present, otherwise the filename without its extension.
        part_base_name = os.path.basename(part_file)
        separator_position = part_base_name.find(' - ')
        if separator_position > 0:
            part_base_name = part_base_name[:separator_position]
        else:
            (part_base_name, ext) = os.path.splitext(part_base_name)
        Log.Debug('Base name is %s', part_base_name)

        # Look up the database ids for this part. Parameterized query: the
        # previous string concatenation broke on filenames containing '"' and
        # was injectable.
        cur = conn.cursor()
        cur.execute('SELECT id, media_item_id FROM media_parts WHERE file = ?',
                    (part_file, ))
        (media_part_id, media_item_id) = cur.fetchone()
        cur.close()  # was 'cur.close' — attribute access, cursor was never closed

        # Start new stream indexes after the highest existing audio stream
        # index (stream type 2 == audio), but never below 1000 so sidecar
        # streams sort after embedded ones.
        last_index = 0
        for stream in [x for x in part.streams if x.type == 2]:
            if last_index < stream.index:
                last_index = stream.index
        last_index = last_index + 1
        if last_index < 1000:
            Log.Debug(
                'There is no sided audio stream for current video file. Setting audio stream index to 1000'
            )
            last_index = 1000
        else:
            Log.Debug(
                'Sided audio stream for current video file was found. Setting audio stream index to %s',
                last_index)

        found_audio_streams = []
        for file_path in sorted(os.listdir(dir_path)):
            file_path = helpers.unicodize(file_path)
            full_path = os.path.join(dir_path, file_path)
            (root, ext) = os.path.splitext(file_path)
            if ext.lower()[1:] not in config.AUDIO_TRACKS:
                Log.Debug('File %s is not an audio track. Skipping.', file_path)
                continue

            # Accept '<base>.<lang>.<comment>' or exactly '<base>.<lang>'.
            audio_regex = r"^" + re.escape(
                part_base_name) + r"\.[a-zA-Z]{2,3}\.[a-zA-Z0-9_.-]*"
            audio_with_comment = re.match(audio_regex, root)
            audio_regex = r"^" + re.escape(part_base_name) + r"\.[a-zA-Z]{2,3}$"
            audio_no_comment = re.match(audio_regex, root)
            if (not audio_with_comment and not audio_no_comment):
                Log.Debug(
                    'File %s is not an audio track for processed item. Skipping',
                    file_path)
                continue

            # Extract the language code from the filename. One of the patterns
            # above matched, so this search is guaranteed to succeed here.
            file_lang_search = re.search(
                r"^" + re.escape(part_base_name) + r"\.([a-zA-Z]{2,3})", root)
            if file_lang_search:
                file_lang = file_lang_search.group(1)

            # Probe the sidecar file for its audio stream properties.
            media_data = check_output([
                os.path.join(Prefs['ffmpeg_path'], "ffprobe"), "-hide_banner",
                "-loglevel", "fatal", "-show_error", "-show_streams",
                "-select_streams", "a", "-print_format", "json", "-i", full_path
            ]).decode("utf-8")
            media_data = json.loads(media_data)

            for media_stream in media_data['streams']:
                # Ad-hoc object mirroring the shape of a Plex stream record.
                result = type('AudioResult', (object, ), {})()
                result.codec = media_stream["codec_name"]
                if result.codec == "dts":
                    result.codec = "dca"  # Plex stores DTS under the 'dca' codec id
                result.extra_data = {}
                if 'channel_layout' in media_stream:
                    result.extra_data['ma:audioChannelLayout'] = media_stream[
                        'channel_layout']
                if 'sample_rate' in media_stream:
                    result.extra_data['ma:samplingRate'] = media_stream[
                        'sample_rate']
                if result.codec == "dca":
                    # ffprobe often omits the bit depth for DTS; default to 24.
                    if 'bits_per_raw_sample' in media_stream:
                        result.extra_data['ma:bitDepth'] = media_stream[
                            'bits_per_raw_sample']
                    else:
                        result.extra_data['ma:bitDepth'] = 24
                if 'profile' in media_stream:
                    result.extra_data['ma:profile'] = profile(
                        media_stream['profile'])
                result.language = 'und'
                if 'tags' in media_stream:
                    if 'title' in media_stream['tags']:
                        result.extra_data['ma:title'] = media_stream['tags'][
                            'title']
                    if 'language' in media_stream['tags']:
                        result.language = media_stream['tags']['language']
                # The filename's language code wins when the container has no
                # language tag of its own.
                if result.language != file_lang and (
                        'tags' not in media_stream
                        or 'language' not in media_stream['tags']):
                    result.language = file_lang
                result.extra_data = urllib.urlencode(result.extra_data)
                result.bitrate = None
                if "bit_rate" in media_stream:
                    result.bitrate = int(media_stream["bit_rate"])
                result.stream_type_id = 2
                result.url = 'file://' + full_path
                result.channels = media_stream["channels"]
                result.index = media_stream["index"]
                found_audio_streams.append(result.url)

                # Skip streams that are already attached to this part.
                if (any(
                        hasattr(x, 'url') and x.url == result.url
                        for x in part.streams)):
                    Log.Debug('Audio track %s is already assigned', file_path)
                    continue

                Log.Debug(
                    'Adding audio track %s to the processed item with stream index %s',
                    file_path, last_index)
                date = time.strftime('%Y-%m-%d %H:%M:%S')
                data = (
                    None,
                    2,
                    media_item_id,
                    result.url,
                    result.codec,
                    result.language,
                    date,
                    date,
                    last_index,
                    media_part_id,
                    result.channels,
                    result.bitrate,
                    result.index,
                    0,
                    0,
                    result.extra_data,
                )
                cur = conn.cursor()
                cur.execute(
                    'INSERT INTO `media_streams` (`id`, `stream_type_id`, `media_item_id`, `url`, `codec`, `language`, `created_at`, `updated_at`, `index`, `media_part_id`, `channels`, `bitrate`, `url_index`, `default`, `forced`, `extra_data`) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);',
                    data)
                cur.execute(
                    'SELECT `metadata_item_id` FROM `media_items` WHERE `id` = ? LIMIT 1',
                    (media_item_id, ))
                (metadata_item_id, ) = cur.fetchone()
                # Touch added_at so the item surfaces as recently changed.
                cur.execute(
                    'UPDATE `metadata_items` SET `added_at`=? WHERE `id`=?', (
                        date,
                        metadata_item_id,
                    ))
                cur.close()
                last_index = last_index + 1

        cur = conn.cursor()
        if len(found_audio_streams) > 0:
            # Nothing from https://support.plex.tv/articles/203810286-what-media-formats-are-supported/ supports split, so we will use that to force transcoding
            cur.execute(
                'UPDATE `media_items` SET `container` = "split", `audio_codec` = ? WHERE `id` = ?',
                (
                    result.codec,
                    media_item_id,
                ))
            Log.Debug('Updating media item container attribute')
            cur.execute(
                'SELECT `extra_data` FROM `media_parts` WHERE `file` = ? LIMIT 1',
                (part.file, ))
            (extra_data_part, ) = cur.fetchone()
            # extra_data is stored urlencoded; merge in the forced container.
            sq = urlparse.parse_qsl(extra_data_part)
            extra_data = dict(sq)
            extra_data['ma:container'] = 'split'
            extra_data_encoded = urllib.urlencode(extra_data)
            cur.execute(
                'UPDATE `media_parts` SET `extra_data` = ? WHERE `file` = ?', (
                    extra_data_encoded,
                    part.file,
                ))
            Log.Debug('Updating media part container attribute')
        # Remove sidecar streams whose files have disappeared.
        for del_stream in [
                x for x in part.streams if hasattr(x, 'url') and x.type == 2
                and x.url not in found_audio_streams
        ]:
            Log.Debug("Deleting unused stream %s", del_stream.url)
            cur.execute('DELETE FROM `media_streams` WHERE `url` = ?',
                        (del_stream.url, ))
        cur.close()
    conn.commit()
def updateAlbum(metadata, media, lang, find_extras=False, artist_extras={}, extra_type_map=None):
    """Update album metadata from local files and embedded tags.

    metadata:       album metadata object to populate (Plex framework).
    media:          album media tree; media.children are the tracks.
    lang:           language code (unused here; part of the agent signature).
    find_extras:    when True, also look for per-track video extras.
    artist_extras:  shared artist-level extras dict. NOTE(review): mutable
                    default argument — the same dict is shared across calls
                    that omit it; appears to be passed through only.
    extra_type_map: maps extra-type names to extra object classes.
    """
    # Clear out the title to ensure stale data doesn't clobber other agents' contributions.
    metadata.title = None
    # clear out genres for this album so we will get genres for all tracks in audio_helper.process_metadata(metadata)
    metadata.genres.clear()
    valid_posters = []
    valid_art = []
    valid_keys = defaultdict(list)  # track_key -> lyric file paths found for that track
    path = None
    for index, track in enumerate(media.children):
        # Stable key for metadata.tracks: the track guid, or the enumeration
        # index when the guid is empty/None.
        track_key = track.guid or index
        for item in track.items:
            for part in item.parts:
                filename = helpers.unicodize(part.file)
                path = os.path.dirname(filename)
                (file_root, fext) = os.path.splitext(filename)
                # Map of lowercased name -> actual name for everything in the
                # track's directory.
                path_files = {}
                for p in os.listdir(path):
                    path_files[p.lower()] = p
                # Look for posters
                poster_files = config.ALBUM_POSTER_FILES + [
                    os.path.basename(file_root),
                    helpers.splitPath(path)[-1]
                ]
                # Worklist scan: directories listed in ALBUM_POSTER_DIRS are
                # expanded in place so their contents are also considered.
                path_file_keys = path_files.keys()
                while len(path_file_keys) > 0:
                    data_file = path_file_keys.pop(0)
                    if data_file in config.ALBUM_POSTER_DIRS and os.path.isdir(
                            os.path.join(path, path_files[data_file])):
                        for p in os.listdir(
                                os.path.join(path, path_files[data_file])):
                            p = os.path.join(path_files[data_file], p)
                            path_files[p.lower()] = p
                            path_file_keys.append(p.lower())
                        continue
                    poster_match = False
                    art_match = False
                    (art_base, art_ext) = os.path.splitext(data_file)
                    # Only consider recognised image extensions.
                    if not art_ext[1:] in config.ART_EXTS:
                        continue
                    # Anything inside a dedicated poster dir is a poster.
                    if os.path.dirname(data_file) in config.ALBUM_POSTER_DIRS:
                        poster_match = True
                    if not poster_match:
                        for name in poster_files:
                            if art_base.startswith(name):
                                poster_match = True
                                break
                    if not poster_match:
                        for name in config.ART_FILES:
                            if art_base.startswith(name):
                                art_match = True
                                break
                    if poster_match or art_match:
                        data = Core.storage.load(
                            os.path.join(path, path_files[data_file]))
                        digest = hashlib.md5(data).hexdigest()
                        (valid_posters if poster_match else valid_art).append(digest)
                        addAlbumImage(
                            metadata.posters if poster_match else metadata.art,
                            'poster' if poster_match else 'art', data_file,
                            filename, data, digest)
                # If there is an appropriate AudioHelper, use it.
                audio_helper = audiohelpers.AudioHelpers(part.file)
                if audio_helper != None:
                    # NOTE(review): bare except silently swallows any tag-read
                    # failure (best-effort by design, but hides real errors).
                    try:
                        # process_metadata also returns poster digests found
                        # in embedded art.
                        valid_posters = valid_posters + audio_helper.process_metadata(metadata)
                        # Album sort title.
                        if hasattr(audio_helper, 'get_album_sort_title'):
                            album_sort_title = audio_helper.get_album_sort_title()
                            if album_sort_title and hasattr(metadata, 'title_sort'):
                                metadata.title_sort = album_sort_title
                        # Track sort title.
                        if hasattr(audio_helper, 'get_track_sort_title'):
                            track_sort_title = audio_helper.get_track_sort_title()
                            if track_sort_title and hasattr(metadata.tracks[track_key], 'title_sort'):
                                metadata.tracks[track_key].title_sort = track_sort_title
                    except:
                        pass
                # Look for a video extra for this track.
                if find_extras:
                    track_video = findTrackExtra(media, track,
                                                 helpers.unicodize(part.file),
                                                 extra_type_map)
                    if track_video is not None:
                        metadata.tracks[track_key].extras.add(track_video)
                # Look for lyrics (same basename as the track, .txt or .lrc).
                LYRIC_EXTS = ['txt', 'lrc']
                for ext in LYRIC_EXTS:
                    file = (file_root + '.' + ext)
                    if os.path.exists(file):
                        metadata.tracks[track_key].lyrics[file] = Proxy.LocalFile(file, format=ext)
                        valid_keys[track_key].append(file)
    # Drop lyrics/posters/art whose backing files have disappeared.
    for key in metadata.tracks:
        metadata.tracks[key].lyrics.validate_keys(valid_keys[key])
    metadata.posters.validate_keys(valid_posters)
    metadata.art.validate_keys(valid_art)
def update(self, metadata, media, lang):
    """Update artist-level metadata: genres from track tags, artist sort
    title, local poster/art images, and video extras found on disk.

    metadata: artist metadata object (Plex framework).
    media:    artist media tree; media.children are albums, whose children
              are tracks.
    lang:     language code (unused here; part of the agent signature).
    """
    # Clear out the title to ensure stale data doesn't clobber other agents' contributions.
    metadata.title = None
    if shouldFindExtras():
        extra_type_map = getExtraTypeMap()
        artist_file_dirs = []
        artist_extras = {}
        metadata.genres.clear()
        album_genres = []
        # First look for track extras.
        checked_tag = False
        for album in media.children:
            for track in album.children:
                # Only the first part of the first item per track is examined.
                part = helpers.unicodize(track.items[0].parts[0].file)
                findTrackExtra(part, extra_type_map, artist_extras)
                artist_file_dirs.append(os.path.dirname(part))
                audio_helper = audiohelpers.AudioHelpers(part)
                # Skip genre collection for generic placeholder artists
                # (e.g. "Various Artists").
                if media.title.lower() not in GENERIC_ARTIST_NAMES:
                    if audio_helper and hasattr(audio_helper, 'get_track_genres'):
                        genres = audio_helper.get_track_genres()
                        for genre in genres:
                            if genre not in album_genres:
                                album_genres.append(genre)
                # Look for artist sort field from first track.
                # TODO maybe analyse all tracks and only add title_sort if they are the same.
                if checked_tag == False:
                    checked_tag = True
                    if audio_helper and hasattr(audio_helper, 'get_artist_sort_title'):
                        artist_sort_title = audio_helper.get_artist_sort_title()
                        if artist_sort_title and hasattr(metadata, 'title_sort'):
                            metadata.title_sort = artist_sort_title
        for genre in album_genres:
            metadata.genres.add(genre)
        # Now go through this artist's directories looking for additional extras and local art.
        checked_artist_path = False
        for artist_file_dir in set(artist_file_dirs):
            path = helpers.unicodize(artist_file_dir)
            findArtistExtras(path, extra_type_map, artist_extras, media.title)
            # path[:-1] drops a possible trailing separator before taking the
            # parent directory (dropping the last character of a basename has
            # no effect on the dirname result).
            parentdir = os.path.split(os.path.abspath(path[:-1]))[0]
            name_parentdir = os.path.basename(parentdir)
            artist_has_own_dir = False
            path_to_use = path
            # If the parent directory is named after the artist, art lives in
            # the artist's own folder rather than the album folder.
            if normalizeArtist(name_parentdir) == normalizeArtist(media.title):
                artist_has_own_dir = True
                path_to_use = parentdir
            # Only the first directory is scanned for art/posters.
            if checked_artist_path is False:
                checked_artist_path = True
                path_files = {}
                for p in os.listdir(path_to_use):
                    path_files[p.lower()] = p
                # Look for posters and art
                valid_posters = []
                valid_art = []
                valid_file_names = getValidFileNamesForArt(config.ARTIST_POSTER_FILES, config.ARTIST_PREFIX, artist_has_own_dir)
                for file in valid_file_names:
                    if file in path_files.keys():
                        data = Core.storage.load(os.path.join(path_to_use, path_files[file]))
                        poster_name = hashlib.md5(data).hexdigest()
                        valid_posters.append(poster_name)
                        if poster_name not in metadata.posters:
                            metadata.posters[poster_name] = Proxy.Media(data)
                valid_file_names = getValidFileNamesForArt(config.ART_FILES, config.ARTIST_PREFIX, artist_has_own_dir)
                for file in valid_file_names:
                    if file in path_files.keys():
                        data = Core.storage.load(os.path.join(path_to_use, path_files[file]))
                        art_name = hashlib.md5(data).hexdigest()
                        valid_art.append(art_name)
                        if art_name not in metadata.art:
                            metadata.art[art_name] = Proxy.Media(data)
                # Drop images whose backing files have disappeared.
                metadata.art.validate_keys(valid_art)
                metadata.posters.validate_keys(valid_posters)
        # Attach collected extras ordered by type precedence, then title.
        for extra in sorted(artist_extras.values(), key = lambda v: (getExtraSortOrder()[type(v)], v.title)):
            metadata.extras.add(extra)
def findSubtitles(part):
    """Locate external subtitle files for a media part and attach them.

    Searches the part's own directory, any user-configured subtitle
    subfolders (Prefs['subs_folder_path'], comma-separated), and the global
    'Subtitles' folder under the app support path. Subtitles whose files
    have disappeared are pruned from the part.

    part: media part object exposing .file and .subtitles.
    """
    # Ignore dotfiles and editor temp files (leading '.' or '~').
    RE_METAFILES = re.compile(r'^[\.~]')
    lang_sub_map = {}
    part_filename = helpers.unicodize(part.file)
    part_basename = os.path.splitext(os.path.basename(part_filename))[0]
    paths = [os.path.dirname(part_filename)]

    # User-configured subtitle subfolders. Guard against the pref being
    # unset/None, which previously crashed on .split (the old
    # 'is not None' check on the list was always true and caught nothing).
    subs_folder_pref = Prefs['subs_folder_path']
    local_subtitle_folders = [
        folder.strip() for folder in subs_folder_pref.split(',')
    ] if subs_folder_pref else []
    for f in local_subtitle_folders:
        local_subtitle_folder_path = os.path.join(paths[0], f)
        if os.path.exists(local_subtitle_folder_path):
            paths.append(local_subtitle_folder_path)

    # Check for a global subtitle location.
    global_subtitle_folder = os.path.join(Core.app_support_path, 'Subtitles')
    if os.path.exists(global_subtitle_folder):
        paths.append(global_subtitle_folder)

    # Build a dictionary of files to their absolute paths, and count the
    # actual media files present — loosely named subtitles are only accepted
    # when the folder holds a single media file.
    file_paths = {}
    total_media_files = 0
    for path in paths:
        path = helpers.unicodize(path)
        for file_path_listing in os.listdir(path):
            # os.listdir on a unicode path returns NFD; we use NFC internally,
            # so normalize before any regex/comparison work.
            file_path_listing = helpers.unicodize(file_path_listing)
            if os.path.isfile(os.path.join(path, file_path_listing)
                              ) and not RE_METAFILES.search(file_path_listing):
                file_paths[file_path_listing.lower()] = os.path.join(
                    path, file_path_listing)
                # If we've found an actual media file, we should record it.
                (root, ext) = os.path.splitext(file_path_listing)
                if ext.lower()[1:] in config.VIDEO_EXTS:
                    total_media_files += 1

    Log('Looking for subtitle media in %d paths with %d media files.',
        len(paths), total_media_files)
    Log('Paths: %s', ", ".join([helpers.unicodize(p) for p in paths]))

    for file_path in file_paths.values():
        local_basename = helpers.unicodize(
            os.path.splitext(
                os.path.basename(file_path))[0])  # no language, no flag
        local_basename2 = local_basename.rsplit(
            '.', 1)[0]  # includes language, no flag
        local_basename3 = local_basename2.rsplit(
            '.', 1)[0]  # includes language and flag
        filename_matches_part = local_basename == part_basename or local_basename2 == part_basename or local_basename3 == part_basename

        # Files in the global subtitle folder must match the part name exactly.
        if file_path.count(
                global_subtitle_folder) and not filename_matches_part:
            continue

        # With several media files in one folder, loose names are ambiguous.
        if total_media_files > 1 and not filename_matches_part:
            continue

        subtitle_helper = subtitlehelpers.SubtitleHelpers(file_path)
        if subtitle_helper is not None:
            local_lang_map = subtitle_helper.process_subtitles(part)
            for new_language, subtitles in local_lang_map.items():
                # Collect located subtitles per language so that we can
                # validate them at the end. ('not in' replaces the removed
                # Python 2-only dict.has_key.)
                if new_language not in lang_sub_map:
                    lang_sub_map[new_language] = []
                lang_sub_map[
                    new_language] = lang_sub_map[new_language] + subtitles

    # Now whack subtitles that don't exist anymore.
    for language in lang_sub_map.keys():
        part.subtitles[language].validate_keys(lang_sub_map[language])

    # Now whack the languages that don't exist anymore.
    for language in list(
            set(part.subtitles.keys()) - set(lang_sub_map.keys())):
        part.subtitles[language].validate_keys({})
def findAssests(metadata, paths, type, part = None):
    """Find local artwork/theme assets and register them on *metadata*.

    metadata: metadata object for a show, season, episode, or movie.
    paths:    directories to scan for assets.
    type:     one of 'show', 'season', 'episode', 'movie' — selects which
              filename patterns are searched and which metadata lists
              (posters, banners, art, themes, thumbs) receive them.
    part:     optional media part; required for 'episode'/'movie' patterns
              that match against the media file's root name.

    (Function name typo 'Assests' kept — callers depend on it.)
    """
    root_file = getRootFile(helpers.unicodize(part.file)) if part else None

    # We start by building a dictionary of files to their absolute paths. We
    # also need to know the number of media files that are actually present,
    # in case the found local media asset is limited to a single instance per
    # media file.
    path_files = {}
    total_media_files = 0
    for path in paths:
        path = helpers.unicodize(path)
        for file_path in os.listdir(path):
            # os.listdir on a unicode path returns NFD; we use NFC internally,
            # so normalize before comparisons.
            if os.path.isfile(os.path.join(path, helpers.unicodize(file_path))):
                path_files[file_path.lower()] = os.path.join(path, file_path)
                # If we've found an actual media file (not a trailer), we should record it.
                (root, ext) = os.path.splitext(file_path)
                if ext.lower()[1:] in config.VIDEO_EXTS and not root.lower().endswith('-trailer'):
                    total_media_files += 1

    Log('Looking for %s media (%s) in %d paths (root file: %s) with %d media files.', type, metadata.title, len(paths), root_file, total_media_files)
    Log('Paths: %s', ", ".join([ helpers.unicodize(p) for p in paths ]))

    # Figure out what regexs to use: each tuple is
    # (filename pattern, destination media list, extensions, limited) where
    # 'limited' restricts the pattern when a folder holds several media files.
    search_tuples = []
    if type == 'season':
        search_tuples += [['season(-|0|\s)?%s[-a-z]?(-poster)?' % metadata.index, metadata.posters, config.IMAGE_EXTS, False]]
        search_tuples += [['season(-|0|\s)?%s-banner[-a-z]?' % metadata.index, metadata.banners, config.IMAGE_EXTS, False]]
        if int(metadata.index) == 0:
            # Season zero, also look for Frodo-compliant 'specials' artwork.
            search_tuples += [['season-specials-poster', metadata.posters, config.IMAGE_EXTS, False]]
            search_tuples += [['season-specials-banner', metadata.banners, config.IMAGE_EXTS, False]]
    elif type == 'show':
        search_tuples += [['(show|poster|folder)-?[0-9]?', metadata.posters, config.IMAGE_EXTS, False]]
        search_tuples += [['banner-?[0-9]?', metadata.banners, config.IMAGE_EXTS, False]]
        search_tuples += [['(fanart|art|background|backdrop)-?[0-9]?', metadata.art, config.IMAGE_EXTS, False]]
        search_tuples += [['theme-?[0-9]?', metadata.themes, config.AUDIO_EXTS, False]]
    elif type == 'episode':
        search_tuples += [[re.escape(root_file) + '(-|-thumb)?[0-9]?', metadata.thumbs, config.IMAGE_EXTS, False]]
    elif type == 'movie':
        search_tuples += [['(poster|default|cover|movie|folder|' + re.escape(root_file) + ')-?[0-9]?', metadata.posters, config.IMAGE_EXTS, True]]
        search_tuples += [['(fanart|art|background|backdrop|' + re.escape(root_file) + '-fanart' + ')-?[0-9]?', metadata.art, config.IMAGE_EXTS, True]]

    for (pattern, media_list, extensions, limited) in search_tuples:
        valid_keys = []
        sort_index = 1
        # Sort by basename so sort_order assignment is deterministic.
        file_path_keys = sorted(path_files.keys(), key = lambda x: os.path.splitext(x)[0])
        for file_path in file_path_keys:
            for ext in extensions:
                # The name/extension separator dot is now escaped — previously
                # the bare '.' matched any character (e.g. 'posterXjpg').
                if re.match(r'%s\.%s' % (pattern, ext), file_path, re.IGNORECASE):
                    # Use a pattern if it's unlimited, or if there's only one media file.
                    if (limited and total_media_files == 1) or (not limited) or (file_path.find(root_file.lower()) == 0):
                        # Read data and hash it.
                        data = Core.storage.load(path_files[file_path])
                        media_hash = hashlib.md5(data).hexdigest()
                        # See if we need to add it.
                        valid_keys.append(media_hash)
                        if media_hash not in media_list:
                            media_list[media_hash] = Proxy.Media(data, sort_order = sort_index)
                            sort_index = sort_index + 1
                            Log(' Local asset added: %s (%s)', path_files[file_path], media_hash)
                    else:
                        Log('Skipping file %s because there are %d media files.', file_path, total_media_files)
        Log('Found %d valid things for pattern %s (ext: %s)', len(valid_keys), pattern, str(extensions))
        # Drop assets whose backing files have disappeared.
        media_list.validate_keys(valid_keys)
def updateAlbum(metadata, media, lang, find_extras=False, artist_extras={}, extra_type_map=None, prefs={}):
    """Update album metadata from local art files and embedded audio tags.

    metadata:       album metadata object to populate (Plex framework).
    media:          album media tree; media.children are the tracks.
    lang:           language code (unused here; part of the agent signature).
    find_extras:    when True, also look for per-track video extras.
    artist_extras:  shared artist-level extras dict. NOTE(review): mutable
                    default argument — shared across calls that omit it.
    extra_type_map: maps extra-type names to extra object classes.
    prefs:          agent preference dict. NOTE(review): prefs['albumPosters']
                    raises KeyError if the key is missing — callers appear to
                    always supply it; confirm.
    """
    # clear out genres for this album so we will get genres for all tracks in audio_helper.process_metadata(metadata)
    metadata.genres.clear()
    valid_posters = []
    valid_art = []
    valid_keys = defaultdict(list)   # track_key -> lyric files found
    valid_track_keys = []            # tracks seen this run, for validate_keys
    path = None
    for index, track in enumerate(media.children):
        # Stable key for metadata.tracks: track id, or enumeration index when
        # the id is falsy.
        track_key = track.id or index
        valid_track_keys.append(track_key)
        for item in track.items:
            for part in item.parts:
                filename = helpers.unicodize(part.file)
                path = os.path.dirname(filename)
                (file_root, fext) = os.path.splitext(filename)
                path_files = {}
                if len(path) > 0:
                    for p in os.listdir(path):
                        path_files[p.lower()] = p
                # Look for posters
                poster_files = config.ALBUM_POSTER_FILES + [
                    os.path.basename(file_root),
                    helpers.splitPath(path)[-1]
                ]
                # Worklist scan; subdirectories named in ALBUM_POSTER_DIRS are
                # expanded in place so their contents are also considered.
                path_file_keys = path_files.keys()
                order = 1  # sort_order passed through to addAlbumImage
                while len(path_file_keys) > 0:
                    data_file = path_file_keys.pop(0)
                    if data_file in config.ALBUM_POSTER_DIRS and os.path.isdir(
                            os.path.join(path, path_files[data_file])):
                        Log('Searching art subdir %s for file %s',
                            os.path.join(path, path_files[data_file]), filename)
                        for p in os.listdir(
                                os.path.join(path, path_files[data_file])):
                            p = os.path.join(path_files[data_file], p)
                            path_files[p.lower()] = p
                            path_file_keys.append(p.lower())
                        continue
                    poster_match = False
                    art_match = False
                    (art_base, art_ext) = os.path.splitext(data_file)
                    # Only recognised image extensions are considered.
                    if not art_ext[1:] in config.ART_EXTS:
                        continue
                    # Anything inside a dedicated poster dir is a poster.
                    if os.path.dirname(data_file) in config.ALBUM_POSTER_DIRS:
                        poster_match = True
                    if not poster_match:
                        for name in poster_files:
                            if art_base.startswith(name):
                                poster_match = True
                                break
                    if not poster_match:
                        for name in config.ART_FILES:
                            if art_base.startswith(name):
                                art_match = True
                                break
                    # If we only want posters from the cloud, ignore anything we find.
                    if prefs['albumPosters'] == 2:
                        poster_match = False
                    if poster_match or art_match:
                        data = Core.storage.load(
                            os.path.join(path, path_files[data_file]))
                        digest = hashlib.md5(data).hexdigest()
                        (valid_posters if poster_match else valid_art).append(digest)
                        addAlbumImage(
                            metadata.posters if poster_match else metadata.art,
                            'poster' if poster_match else 'art', data_file,
                            filename, data, digest, order)
                        order = order + 1
                # If there is an appropriate AudioHelper, use it.
                audio_helper = audiohelpers.AudioHelpers(part.file)
                if audio_helper != None:
                    try:
                        # process_metadata also returns poster digests from
                        # embedded art.
                        valid_posters = valid_posters + audio_helper.process_metadata(metadata, prefs)
                        # Album title (making sure not to blank it out).
                        if hasattr(audio_helper, 'get_album_title'):
                            album_title = audio_helper.get_album_title()
                            if album_title:
                                album_title = CleanString(album_title)
                                if len(album_title) > 0:
                                    metadata.title = album_title
                        # Album sort title.
                        if hasattr(audio_helper, 'get_album_sort_title'):
                            metadata.title_sort = ''
                            album_sort_title = audio_helper.get_album_sort_title()
                            if album_sort_title and hasattr(metadata, 'title_sort'):
                                metadata.title_sort = CleanString(album_sort_title)
                        # Album summary
                        if hasattr(audio_helper, 'get_album_summary'):
                            metadata.summary = ''
                            album_summary = audio_helper.get_album_summary()
                            if album_summary:
                                metadata.summary = CleanString(album_summary)
                        # Track sort title.
                        if hasattr(audio_helper, 'get_track_sort_title'):
                            track_sort_title = audio_helper.get_track_sort_title()
                            metadata.tracks[track_key].title_sort = ''
                            if track_sort_title and hasattr(metadata.tracks[track_key], 'title_sort'):
                                metadata.tracks[track_key].title_sort = CleanString(track_sort_title)
                        # Track title (falls back to a cleaned filename).
                        if hasattr(audio_helper, 'get_track_title'):
                            track_title = audio_helper.get_track_title()
                            metadata.tracks[track_key].title = ''
                            if track_title is not None:
                                metadata.tracks[track_key].title = CleanString(track_title)
                            else:
                                metadata.tracks[track_key].title = CleanFilename(part.file)
                        # Track index.
                        if hasattr(audio_helper, 'get_track_index'):
                            track_index = audio_helper.get_track_index()
                            if track_index is not None:
                                metadata.tracks[track_key].track_index = track_index
                        # Track parent index.
                        if hasattr(audio_helper, 'get_track_parent_index'):
                            track_parent_index = audio_helper.get_track_parent_index()
                            if track_parent_index is not None:
                                metadata.tracks[track_key].disc_index = track_parent_index
                        # Track artist.
                        if hasattr(audio_helper, 'get_track_artist'):
                            track_artist = audio_helper.get_track_artist()
                            metadata.tracks[track_key].original_title = BLANK_FIELD
                            if track_artist is not None:
                                metadata.tracks[track_key].original_title = StringOrBlank(track_artist)
                    except:
                        # NOTE(review): bare except — best-effort tag read,
                        # but it hides real failures beyond tag errors.
                        Log('Exception reading tags')
                # Look for a video extra for this track.
                if find_extras:
                    track_video = findTrackExtra(media, track,
                                                 helpers.unicodize(part.file),
                                                 extra_type_map)
                    if track_video is not None:
                        metadata.tracks[track_key].extras.add(track_video)
                # Look for lyrics (same basename as the track, .txt or .lrc).
                LYRIC_EXTS = ['txt', 'lrc']
                for ext in LYRIC_EXTS:
                    file = (file_root + '.' + ext)
                    if os.path.exists(file):
                        Log('Found a lyric in %s', file)
                        metadata.tracks[track_key].lyrics[file] = Proxy.LocalFile(file, format=ext)
                        valid_keys[track_key].append(file)
    # Drop lyrics, stale tracks, posters and art no longer backed by files.
    for key in metadata.tracks:
        metadata.tracks[key].lyrics.validate_keys(valid_keys[key])
    metadata.tracks.validate_keys(valid_track_keys)
    metadata.posters.validate_keys(valid_posters)
    metadata.art.validate_keys(valid_art)
if find_extras: extra_type_map = {'trailer' : TrailerObject, 'deleted' : DeletedSceneObject, 'behindthescenes' : BehindTheScenesObject, 'interview' : InterviewObject, 'scene' : SceneOrSampleObject} # We start by building a dictionary of files to their absolute paths. We also need to know # the number of media files that are actually present, in case the found local media asset # is limited to a single instance per media file. # path_files = {} multi_parts = [] total_media_files = 0 root_file = getRootFile(helpers.unicodize(parts[0].file)) if parts else None for path in paths: path = helpers.unicodize(path) for file_path in sorted(os.listdir(path)): # When using os.listdir with a unicode path, it will always return a string using the # NFD form. However, we internally are using the form NFC and therefore need to convert # it to allow correct regex / comparisons to be performed. # file_path = helpers.unicodize(file_path) full_path = os.path.join(path,file_path) if os.path.isfile(full_path): path_files[file_path.lower()] = full_path # Only count real and distinct (not stacked) video files.
def update(self, metadata, media, lang):
    """Update artist-level metadata: genres from track tags, artist sort
    title, local poster/art images, and video extras found on disk.

    metadata: artist metadata object (Plex framework).
    media:    artist media tree; media.children are albums, whose children
              are tracks.
    lang:     language code (unused here; part of the agent signature).
    """
    # Clear out the title to ensure stale data doesn't clobber other agents' contributions.
    metadata.title = None
    if shouldFindExtras():
        extra_type_map = getExtraTypeMap()
        artist_file_dirs = []
        artist_extras = {}
        metadata.genres.clear()
        album_genres = []
        # First look for track extras.
        checked_tag = False
        for album in media.children:
            for track in album.children:
                # Only the first part of the first item per track is examined.
                part = helpers.unicodize(track.items[0].parts[0].file)
                findTrackExtra(part, extra_type_map, artist_extras)
                artist_file_dirs.append(os.path.dirname(part))
                audio_helper = audiohelpers.AudioHelpers(part)
                # Skip genre collection for generic placeholder artists
                # (e.g. "Various Artists").
                if media.title.lower() not in GENERIC_ARTIST_NAMES:
                    if audio_helper and hasattr(audio_helper, 'get_track_genres'):
                        genres = audio_helper.get_track_genres()
                        for genre in genres:
                            if genre not in album_genres:
                                album_genres.append(genre)
                # Look for artist sort field from first track.
                # TODO maybe analyse all tracks and only add title_sort if they are the same.
                if checked_tag == False:
                    checked_tag = True
                    if audio_helper and hasattr(audio_helper, 'get_artist_sort_title'):
                        artist_sort_title = audio_helper.get_artist_sort_title()
                        if artist_sort_title and hasattr(metadata, 'title_sort'):
                            metadata.title_sort = artist_sort_title
        for genre in album_genres:
            metadata.genres.add(genre)
        # Now go through this artist's directories looking for additional extras and local art.
        checked_artist_path = False
        for artist_file_dir in set(artist_file_dirs):
            path = helpers.unicodize(artist_file_dir)
            findArtistExtras(path, extra_type_map, artist_extras, media.title)
            # path[:-1] drops a possible trailing separator before taking the
            # parent directory (dropping the last character of a basename has
            # no effect on the dirname result).
            parentdir = os.path.split(os.path.abspath(path[:-1]))[0]
            name_parentdir = os.path.basename(parentdir)
            artist_has_own_dir = False
            path_to_use = path
            # If the parent directory is named after the artist, art lives in
            # the artist's own folder rather than the album folder.
            if normalizeArtist(name_parentdir) == normalizeArtist(media.title):
                artist_has_own_dir = True
                path_to_use = parentdir
            # Only the first directory is scanned for art/posters.
            if checked_artist_path is False:
                checked_artist_path = True
                path_files = {}
                for p in os.listdir(path_to_use):
                    path_files[p.lower()] = p
                # Look for posters and art
                valid_posters = []
                valid_art = []
                valid_file_names = getValidFileNamesForArt(
                    config.ARTIST_POSTER_FILES, config.ARTIST_PREFIX,
                    artist_has_own_dir)
                for file in valid_file_names:
                    if file in path_files.keys():
                        data = Core.storage.load(
                            os.path.join(path_to_use, path_files[file]))
                        poster_name = hashlib.md5(data).hexdigest()
                        valid_posters.append(poster_name)
                        if poster_name not in metadata.posters:
                            metadata.posters[poster_name] = Proxy.Media(data)
                valid_file_names = getValidFileNamesForArt(
                    config.ART_FILES, config.ARTIST_PREFIX,
                    artist_has_own_dir)
                for file in valid_file_names:
                    if file in path_files.keys():
                        data = Core.storage.load(
                            os.path.join(path_to_use, path_files[file]))
                        art_name = hashlib.md5(data).hexdigest()
                        valid_art.append(art_name)
                        if art_name not in metadata.art:
                            metadata.art[art_name] = Proxy.Media(data)
                # Drop images whose backing files have disappeared.
                metadata.art.validate_keys(valid_art)
                metadata.posters.validate_keys(valid_posters)
        # Attach collected extras ordered by type precedence, then title.
        for extra in sorted(artist_extras.values(),
                            key=lambda v: (getExtraSortOrder()[type(v)], v.title)):
            metadata.extras.add(extra)
def findSubtitles(part):
    """Locate subtitle files for *part* in its own folder and in the global
    'Subtitles' folder, register them on the part, and prune entries whose
    backing files are gone."""
    found_subs = {}  # language -> accumulated subtitle keys
    media_file = helpers.unicodize(part.file)
    media_basename = os.path.splitext(os.path.basename(media_file))[0]

    # Search the media file's own directory first; the global subtitle folder
    # under the app support path is added only when it exists.
    search_dirs = [os.path.dirname(media_file)]
    global_subtitle_folder = os.path.join(Core.app_support_path, 'Subtitles')
    if os.path.exists(global_subtitle_folder):
        search_dirs.append(global_subtitle_folder)

    # Index every regular file by lowercased name, and count real video files
    # — loosely named subtitles are only trusted when a single media file is
    # present.
    candidate_files = {}
    video_count = 0
    for search_dir in search_dirs:
        search_dir = helpers.unicodize(search_dir)
        for entry in os.listdir(search_dir):
            # os.listdir on a unicode path yields NFD; normalize to NFC
            # before any comparison work.
            if not os.path.isfile(os.path.join(search_dir, helpers.unicodize(entry))):
                continue
            candidate_files[entry.lower()] = os.path.join(search_dir, entry)
            stem, extension = os.path.splitext(entry)
            if extension.lower()[1:] in config.VIDEO_EXTS:
                video_count += 1

    Log('Looking for subtitle media in %d paths with %d media files.', len(search_dirs), video_count)
    Log('Paths: %s', ", ".join([helpers.unicodize(p) for p in search_dirs]))

    for candidate in candidate_files.values():
        stem = helpers.unicodize(os.path.splitext(os.path.basename(candidate))[0])
        stem_without_lang = stem.rsplit('.', 1)[0]
        name_matches = media_basename in (stem, stem_without_lang)

        # Subtitles in the global folder must match the media name exactly.
        if candidate.count(global_subtitle_folder) and not name_matches:
            continue
        # With several media files in the folder, loose names are ambiguous.
        if video_count > 1 and not name_matches:
            continue

        helper = subtitlehelpers.SubtitleHelpers(candidate)
        if helper is None:
            continue
        for found_language, subs in helper.process_subtitles(part).items():
            # Accumulate per language so everything can be validated at the end.
            found_subs[found_language] = found_subs.get(found_language, []) + subs

    # Now whack subtitles that don't exist anymore.
    for language in found_subs.keys():
        part.subtitles[language].validate_keys(found_subs[language])

    # Now whack the languages that don't exist anymore.
    for language in list(set(part.subtitles.keys()) - set(found_subs.keys())):
        part.subtitles[language].validate_keys({})
def updateAlbum(metadata, media, lang, find_extras=False, artist_extras=None, extra_type_map=None):
  """Update an album's metadata from local assets and embedded audio tags.

  For each track part this: loads local poster/art image files sitting next
  to the media, merges tag metadata via an AudioHelper (including album and
  track sort titles), and optionally attaches a per-track video extra.
  Finally validates the poster/art key sets so stale images are dropped.

  Fixes vs. original: mutable default argument ``artist_extras={}`` replaced
  with ``None`` (the parameter is not read in this function, so the change is
  behavior-neutral for all callers); bare ``except: pass`` narrowed to
  ``except Exception`` with a log line so tag failures are visible and
  SystemExit/KeyboardInterrupt are no longer swallowed; ``!= None`` and
  ``in path_files.keys()`` replaced with idiomatic forms.
  """
  # Clear out the title to ensure stale data doesn't clobber other agents' contributions.
  metadata.title = None

  # Clear out genres for this album so we will get genres for all tracks in
  # audio_helper.process_metadata(metadata).
  metadata.genres.clear()

  valid_posters = []
  valid_art = []
  path = None

  for track in media.tracks:
    for item in media.tracks[track].items:
      for part in item.parts:
        filename = helpers.unicodize(part.file)
        path = os.path.dirname(filename)
        (file_root, fext) = os.path.splitext(filename)

        # Map of lowercased filename -> actual filename for case-insensitive lookup.
        path_files = {}
        for p in os.listdir(path):
          path_files[p.lower()] = p

        # Look for posters.
        poster_files = config.ALBUM_POSTER_FILES + [ os.path.basename(file_root), helpers.splitPath(path)[-1] ]
        for ext in config.ART_EXTS:
          for name in poster_files:
            file = (name + '.' + ext).lower()
            if file in path_files:
              data = Core.storage.load(os.path.join(path, path_files[file]))
              poster_name = hashlib.md5(data).hexdigest()
              valid_posters.append(poster_name)
              if poster_name not in metadata.posters:
                metadata.posters[poster_name] = Proxy.Media(data)
                Log('Local asset image added (poster): ' + file + ', for file: ' + filename)
              else:
                Log('Skipping local poster since its already added')

          for name in config.ART_FILES:
            file = (name + '.' + ext).lower()
            if file in path_files:
              data = Core.storage.load(os.path.join(path, path_files[file]))
              art_name = hashlib.md5(data).hexdigest()
              valid_art.append(art_name)
              if art_name not in metadata.art:
                metadata.art[art_name] = Proxy.Media(data)
                Log('Local asset image added (art): ' + file + ', for file: ' + filename)
              else:
                Log('Skipping local art since its already added')

        # If there is an appropriate AudioHelper, use it.
        audio_helper = audiohelpers.AudioHelpers(part.file)
        if audio_helper is not None:
          try:
            valid_posters = valid_posters + audio_helper.process_metadata(metadata)

            # Album sort title.
            if hasattr(audio_helper, 'get_album_sort_title'):
              album_sort_title = audio_helper.get_album_sort_title()
              if album_sort_title and hasattr(metadata, 'title_sort'):
                metadata.title_sort = album_sort_title

            # Track sort title (keyed by track GUID when available).
            if hasattr(audio_helper, 'get_track_sort_title'):
              track_sort_title = audio_helper.get_track_sort_title()
              track_key = media.tracks[track].guid or track
              if track_sort_title and hasattr(metadata.tracks[track_key], 'title_sort'):
                metadata.tracks[track_key].title_sort = track_sort_title
          except Exception:
            # Best effort: tag processing must never abort the whole album
            # update, but log so failures aren't silently invisible.
            Log('Error processing audio metadata for file: ' + filename)

        # Look for a video extra for this track.
        if find_extras:
          track_video = findTrackExtra(helpers.unicodize(part.file), extra_type_map)
          if track_video is not None:
            track_key = media.tracks[track].guid or track
            metadata.tracks[track_key].extras.add(track_video)

  metadata.posters.validate_keys(valid_posters)
  metadata.art.validate_keys(valid_art)
def find_subtitles(part):
    """Locate local subtitle files for a media part and register them.

    Builds the list of search paths (media folder, configured sub-folders,
    custom folder, global Subtitles folder — governed by the
    ``subtitles.save.*`` preferences), optionally housekeeps leftover
    subtitle files with no matching media, matches candidate subtitles
    against the part's basename using the configured strictness, and finally
    validates the part's subtitle streams so stale entries are removed.

    Fixes vs. original: ``dict.has_key()`` (deprecated, removed in Python 3)
    replaced with the ``in`` operator; ``.iteritems()`` replaced with
    ``.items()`` (identical behavior on Python 2 at this scale);
    ``not x in y`` rewritten as ``x not in y``.
    """
    lang_sub_map = {}
    part_filename = helpers.unicodize(part.file)
    part_basename = os.path.splitext(os.path.basename(part_filename))[0]
    use_filesystem = helpers.cast_bool(Prefs["subtitles.save.filesystem"])

    paths = [os.path.dirname(part_filename)] if use_filesystem else []
    global_subtitle_folder = None
    global_folders = []

    if use_filesystem:
        # Check for local subtitles subdirectory.
        sub_dir_base = paths[0]

        sub_dir_list = []
        if Prefs["subtitles.save.subFolder"] != "current folder":
            # got selected subfolder
            sub_dir_list.append(os.path.join(sub_dir_base, Prefs["subtitles.save.subFolder"]))

        sub_dir_custom = Prefs["subtitles.save.subFolder.Custom"].strip() \
            if Prefs["subtitles.save.subFolder.Custom"] else None
        if sub_dir_custom:
            # got custom subfolder
            sub_dir_custom = os.path.normpath(sub_dir_custom)
            if os.path.isdir(sub_dir_custom) and os.path.isabs(sub_dir_custom):
                # absolute folder
                sub_dir_list.append(sub_dir_custom)
                global_folders.append(sub_dir_custom)
            else:
                # relative folder
                fld = os.path.join(sub_dir_base, sub_dir_custom)
                sub_dir_list.append(fld)

        for sub_dir in sub_dir_list:
            if os.path.isdir(sub_dir):
                paths.append(sub_dir)

        # Check for a global subtitle location.
        global_subtitle_folder = os.path.join(Core.app_support_path, 'Subtitles')
        if os.path.exists(global_subtitle_folder):
            paths.append(global_subtitle_folder)
            global_folders.append(global_subtitle_folder)

    # normalize all paths
    paths = [os.path.normpath(os.path.realpath(helpers.unicodize(path))) for path in paths]

    # We start by building a dictionary of files to their absolute paths. We also need to know
    # the number of media files that are actually present, in case the found local media asset
    # is limited to a single instance per media file.
    file_paths = {}
    total_media_files = 0
    media_files = []
    for path in paths:
        for file_path_listing in os.listdir(path.encode(sz_config.fs_encoding)):
            # When using os.listdir with a unicode path, it will always return a string using the
            # NFD form. However, we internally are using the form NFC and therefore need to convert
            # it to allow correct regex / comparisons to be performed.
            file_path_listing = helpers.unicodize(file_path_listing)
            if os.path.isfile(os.path.join(path, file_path_listing).encode(sz_config.fs_encoding)):
                file_paths[file_path_listing.lower()] = os.path.join(path, file_path_listing)

                # If we've found an actual media file, we should record it.
                (root, ext) = os.path.splitext(file_path_listing)
                if ext.lower()[1:] in config.VIDEO_EXTS:
                    total_media_files += 1

                    # collect found media files
                    media_files.append(root)

    # cleanup any leftover subtitle if no associated media file was found
    if helpers.cast_bool(Prefs["subtitles.autoclean"]):
        for path in paths:
            # we can't housekeep the global subtitle folders as we don't know about *all* media files
            # in a library; skip them
            skip_path = False
            for fld in global_folders:
                if path.startswith(fld):
                    Log.Info("Skipping housekeeping of folder: %s", path)
                    skip_path = True
                    break

            if skip_path:
                continue

            for file_path_listing in os.listdir(path.encode(sz_config.fs_encoding)):
                file_path_listing = helpers.unicodize(file_path_listing)
                enc_fn = os.path.join(path, file_path_listing).encode(sz_config.fs_encoding)
                if os.path.isfile(enc_fn):
                    (root, ext) = os.path.splitext(file_path_listing)

                    # it's a subtitle file
                    if ext.lower()[1:] in config.SUBTITLE_EXTS:
                        # get fn without forced/default/normal tag
                        split_tag = root.rsplit(".", 1)
                        if len(split_tag) > 1 and split_tag[1].lower() in ['forced', 'normal', 'default']:
                            root = split_tag[0]

                        # get associated media file name without language
                        sub_fn = subtitlehelpers.ENDSWITH_LANGUAGECODE_RE.sub("", root)

                        # subtitle basename and basename without possible language tag not found in
                        # collected media files? kill.
                        if root not in media_files and sub_fn not in media_files:
                            Log.Info("Removing leftover subtitle: %s", os.path.join(path, file_path_listing))
                            try:
                                os.remove(enc_fn)
                            except (OSError, IOError):
                                Log.Error("Removing failed")

    Log('Looking for subtitle media in %d paths with %d media files.', len(paths), total_media_files)
    Log('Paths: %s', ", ".join([helpers.unicodize(p) for p in paths]))

    for file_path in file_paths.values():
        local_filename = os.path.basename(file_path)
        bn, ext = os.path.splitext(local_filename)
        local_basename = helpers.unicodize(bn)

        # get fn without forced/default/normal tag
        split_tag = local_basename.rsplit(".", 1)
        if len(split_tag) > 1 and split_tag[1].lower() in ['forced', 'normal', 'default']:
            local_basename = split_tag[0]

        # split off possible language tag
        local_basename2 = local_basename.rsplit('.', 1)[0]
        filename_matches_part = local_basename == part_basename or local_basename2 == part_basename
        filename_contains_part = part_basename in local_basename

        # only consider subtitle files from here on
        if ext.lower()[1:] not in config.SUBTITLE_EXTS:
            continue

        # if the file is located within the global subtitle folders and its name doesn't match
        # exactly, ignore it
        if global_folders and not filename_matches_part:
            skip_path = False
            for fld in global_folders:
                if file_path.startswith(fld):
                    skip_path = True
                    break

            if skip_path:
                continue

        # determine whether to pick up the subtitle based on our match strictness
        elif not filename_matches_part:
            if sz_config.ext_match_strictness == "strict" or (
                    sz_config.ext_match_strictness == "loose" and not filename_contains_part):
                Log.Debug("%s doesn't match %s, skipping" % (helpers.unicodize(local_filename),
                                                             helpers.unicodize(part_basename)))
                continue

        subtitle_helper = subtitlehelpers.subtitle_helpers(file_path)
        if subtitle_helper is not None:
            local_lang_map = subtitle_helper.process_subtitles(part)
            for new_language, subtitles in local_lang_map.items():
                # Add the possible new language along with the located subtitles so that we can
                # validate them at the end...
                if new_language not in lang_sub_map:
                    lang_sub_map[new_language] = []
                lang_sub_map[new_language] = lang_sub_map[new_language] + subtitles

    # add known metadata subs to our sub list
    if not use_filesystem:
        for language, sub_list in subtitlehelpers.get_subtitles_from_metadata(part).items():
            if sub_list:
                if language not in lang_sub_map:
                    lang_sub_map[language] = []
                lang_sub_map[language] = lang_sub_map[language] + sub_list

    # Now whack subtitles that don't exist anymore.
    for language in lang_sub_map.keys():
        part.subtitles[language].validate_keys(lang_sub_map[language])

    # Now whack the languages that don't exist anymore.
    for language in list(set(part.subtitles.keys()) - set(lang_sub_map.keys())):
        part.subtitles[language].validate_keys({})
'behindthescenes': BehindTheScenesObject, 'interview': InterviewObject, 'scene': SceneOrSampleObject, 'featurette': FeaturetteObject, 'short': ShortObject, 'other': OtherObject } # We start by building a dictionary of files to their absolute paths. We also need to know # the number of media files that are actually present, in case the found local media asset # is limited to a single instance per media file. # path_files = {} multi_parts = [] total_media_files = 0 root_file = getRootFile(helpers.unicodize( parts[0].file)) if parts else None for path in paths: path = helpers.unicodize(path) for file_path in sorted(os.listdir(path)): # When using os.listdir with a unicode path, it will always return a string using the # NFD form. However, we internally are using the form NFC and therefore need to convert # it to allow correct regex / comparisons to be performed. # file_path = helpers.unicodize(file_path) full_path = os.path.join(path, file_path) if os.path.isfile(full_path): path_files[file_path.lower()] = full_path # Only count real and distinct (not stacked) video files.