def check_directories(self, directory, rel_path, album_directories, options):
    """Checks an export directory for obsolete files.

    Recursively scans directory, deleting files and sub-directories that
    no longer correspond to exported albums.

    Returns:
      True if the directory (or a sub-directory) holds album content and
      must be kept; None if the scan was aborted.
    """
    if options.ignore:
        ignore_re = re.compile(su.fsdec(options.ignore))
        if ignore_re.match(os.path.split(directory)[1]):
            return True
    if not os.path.exists(directory):
        return True
    has_albums = False
    for entry in su.os_listdir_unicode(directory):
        if self._check_abort():
            return
        full_path = os.path.join(directory, entry)
        if not os.path.isdir(full_path):
            # we won't touch some files
            if not imageutils.is_ignore(entry):
                delete_album_file(full_path, directory, "Obsolete", options)
            continue
        if entry == "iPod Photo Cache":
            su.pout("Skipping " + full_path)
            continue
        if full_path in album_directories:
            has_albums = True
        elif self.check_directories(full_path, os.path.join(rel_path, entry),
                                    album_directories, options):
            has_albums = True
        else:
            delete_album_file(full_path, directory, "Obsolete directory",
                              options)
    return has_albums
def format_photo_name(photo, album_name, index, padded_index, name_template):
    """Formats an image name based on a template.

    Args:
      photo: the image; caption, date, event name, and event indexes are read.
      album_name: name of the enclosing album.
      index: position of the image within the album.
      padded_index: zero-padded position of the image within the album.
      name_template: str.format() template with named fields.
    """
    # default image caption filenames have the file extension on them
    # already, so remove it or the export filename will look like
    # "IMG 0087 JPG.jpg"
    orig_basename = re.sub(
        re.compile(r'\.(jpeg|jpg|mpg|mpeg|mov|png|tif|tiff)$', re.IGNORECASE),
        '', photo.caption)
    if photo.date:
        year = str(photo.date.year)
        month = str(photo.date.month).zfill(2)
        day = str(photo.date.day).zfill(2)
    else:
        year = ''
        month = ''
        day = ''
    nodate_album_name = album_name
    match = re.match(_YEAR_PATTERN_INDEX, nodate_album_name)
    if match:
        nodate_album_name = match.group(2)
    nodate_event_name = photo.event_name
    match = re.match(_YEAR_PATTERN_INDEX, nodate_event_name)
    if match:
        nodate_event_name = match.group(2)
    ascii_title = orig_basename.encode('ascii', 'replace')
    plain_title = ascii_title.replace(' ', '')
    clear_title = _CLEAR_TEXT_PATTERN.sub("_", orig_basename)
    ascii_album_name = album_name.encode('ascii', 'replace')
    plain_album_name = ascii_album_name.replace(' ', '')
    ascii_event = photo.event_name.encode('ascii', 'replace')
    plain_event = ascii_event.replace(' ', '')
    try:
        formatted_name = name_template.format(
            index=index, index0=padded_index,
            event_index=photo.event_index, event_index0=photo.event_index0,
            album=album_name, ascii_album=ascii_album_name,
            plain_album=plain_album_name,
            event=photo.event_name, ascii_event=ascii_event,
            plain_event=plain_event,
            nodate_album=nodate_album_name, nodate_event=nodate_event_name,
            title=orig_basename,
            caption=orig_basename,  # backward compatibility
            ascii_title=ascii_title, plain_title=plain_title,
            yyyy=year, mm=month, dd=day, clear_title=clear_title)
    except KeyError as ex:
        # Fixed: modern "as" except syntax (valid in Python 2.6+ and 3.x),
        # and the help message now lists every supported field — it was
        # missing plain_album, plain_event, nodate_album, nodate_event,
        # caption and plain_title.
        su.pout(u'Unrecognized field in name template: %s. Use one of: index, index0, '
                'event_index, event_index0, album, ascii_album, plain_album, event, '
                'ascii_event, plain_event, nodate_album, nodate_event, title, caption, '
                'ascii_title, plain_title, clear_title, yyyy, mm, or dd.' % (str(ex)))
        formatted_name = name_template
    # NOTE(review): no return statement is visible in this chunk; confirm
    # against the full file how formatted_name is returned/used.
def check_directories(self, directory, rel_path, album_directories, options):
    """Checks an export directory for obsolete files.

    Recursively scans directory, deleting files and sub-directories that
    no longer correspond to exported albums.

    Returns:
      True if the directory (or a sub-directory) holds album content and
      must be kept; None if the scan was aborted.
    """
    if options.ignore:
        exclude_pattern = re.compile(su.fsdec(options.ignore))
        if exclude_pattern.match(os.path.split(directory)[1]):
            return True
    if not os.path.exists(directory):
        return True
    contains_albums = False
    for f in su.os_listdir_unicode(directory):
        if self._check_abort():
            return
        album_file = os.path.join(directory, f)
        if os.path.isdir(album_file):
            if f == "iPod Photo Cache":
                su.pout("Skipping " + album_file)
                continue
            rel_path_file = os.path.join(rel_path, f)
            if album_file in album_directories:
                contains_albums = True
            elif not self.check_directories(album_file, rel_path_file,
                                            album_directories, options):
                delete_album_file(album_file, directory,
                                  "Obsolete directory", options)
            else:
                # Fixed: a sub-directory whose own scan found albums must
                # mark this directory as containing albums too; otherwise
                # the parent of a nested album directory is wrongly treated
                # as obsolete (matches the other copy of this function).
                contains_albums = True
        else:
            # we won't touch some files
            if imageutils.is_ignore(f):
                continue
            delete_album_file(album_file, directory, "Obsolete", options)
    return contains_albums
def format_photo_name(photo, album_name, index, padded_index, name_template):
    """Formats an image name based on a template.

    Args:
      photo: the image; caption, date, event name, and event indexes are read.
      album_name: name of the enclosing album.
      index: position of the image within the album.
      padded_index: zero-padded position of the image within the album.
      name_template: str.format() template with named fields.
    """
    # default image caption filenames have the file extension on them
    # already, so remove it or the export filename will look like
    # "IMG 0087 JPG.jpg"
    orig_basename = re.sub(
        re.compile(r'\.(jpeg|jpg|mpg|mpeg|mov|png|tif|tiff)$', re.IGNORECASE),
        '', photo.caption)
    if photo.date:
        year = str(photo.date.year)
        month = str(photo.date.month).zfill(2)
        day = str(photo.date.day).zfill(2)
    else:
        year = ''
        month = ''
        day = ''
    nodate_album_name = album_name
    match = re.match(_YEAR_PATTERN_INDEX, nodate_album_name)
    if match:
        nodate_album_name = match.group(2)
    nodate_event_name = photo.event_name
    match = re.match(_YEAR_PATTERN_INDEX, nodate_event_name)
    if match:
        nodate_event_name = match.group(2)
    ascii_title = orig_basename.encode('ascii', 'replace')
    plain_title = ascii_title.replace(' ', '')
    ascii_album_name = album_name.encode('ascii', 'replace')
    plain_album_name = ascii_album_name.replace(' ', '')
    ascii_event = photo.event_name.encode('ascii', 'replace')
    plain_event = ascii_event.replace(' ', '')
    try:
        formatted_name = name_template.format(
            index=index, index0=padded_index,
            event_index=photo.event_index, event_index0=photo.event_index0,
            album=album_name, ascii_album=ascii_album_name,
            plain_album=plain_album_name,
            event=photo.event_name, ascii_event=ascii_event,
            plain_event=plain_event,
            nodate_album=nodate_album_name, nodate_event=nodate_event_name,
            title=orig_basename,
            caption=orig_basename,  # backward compatibility
            ascii_title=ascii_title, plain_title=plain_title,
            yyyy=year, mm=month, dd=day)
    except KeyError as ex:
        # Fixed: modern "as" except syntax (valid in Python 2.6+ and 3.x),
        # and the help message now lists every supported field — it was
        # missing plain_album, plain_event, nodate_album, nodate_event,
        # caption and plain_title.
        su.pout(u'Unrecognized field in name template: %s. Use one of: index, index0, '
                'event_index, event_index0, album, ascii_album, plain_album, event, '
                'ascii_event, plain_event, nodate_album, nodate_event, title, caption, '
                'ascii_title, plain_title, yyyy, mm, or dd.' % (str(ex)))
        formatted_name = name_template
    # NOTE(review): no return statement is visible in this chunk; confirm
    # against the full file how formatted_name is returned/used.
def load_aperture_originals(self):
    """Locates the original image files (Masters).

    Only effective for Aperture libraries whose masters are stored inside
    the library; a no-op when Aperture metadata is already loaded.
    """
    if not self.aperture or self.aperture_data:
        return
    su.pout('Scanning for Aperture Originals...')
    for img in self.images_by_id.values():
        img.find_aperture_original()
def is_expired(self):
    """Tests if the cached IptcData are expired (too old)."""
    # Subtract up to 20% jitter from the max age so cache entries don't
    # all expire at the same moment.
    jitter = CACHE_MAX_AGE * 0.2 * random.random()
    age = time.time() - self.created
    if age <= CACHE_MAX_AGE - jitter:
        return False
    su.pout("Cached data for %s too old, ignoring" % (self.file_path))
    return True
def load_aperture_originals(self):
    """Locates the original image files (Masters).

    Only effective for Aperture libraries whose masters are stored inside
    the library; a no-op when Aperture metadata is already loaded.
    """
    if not self.aperture or self.aperture_data:
        return
    su.pout('Scanning for Originals...')
    for img in self.images_by_id.values():
        img.find_aperture_original()
def is_expired(self):
    """Tests if the cached IptcData are expired (too old)."""
    # Subtract up to 20% jitter from the max age so cache entries don't
    # all expire at the same moment.
    jitter = CACHE_MAX_AGE * 0.2 * random.random()
    age = time.time() - self.created
    if age <= CACHE_MAX_AGE - jitter:
        return False
    su.pout("Cached data for %s too old, ignoring" % (self.file_path))
    return True
def get_iphoto_data(album_xml_file, ratings=None, verbose=False, aperture=False): """reads the iPhoto database and converts it into an iPhotoData object.""" library_dir = os.path.dirname(album_xml_file) is_aperture = aperture or album_xml_file.endswith('ApertureData.xml') if verbose: print "Reading %s database from %s..." % ( 'Aperture' if is_aperture else 'iPhoto', album_xml_file) album_xml = applexml.read_applexml_fixed(album_xml_file) album_xml2 = None if is_aperture: try: import appledata.aperturedata as aperturedata aperture_data = aperturedata.get_aperture_data( library_dir, verbose) except ImportError: aperture_data = None else: aperture_data = None # Recent iPhoto versions write event and album data into # iLifeShared/AlbumData2.xml. album_xml_file2 = os.path.join( os.path.split(album_xml_file)[0], "iLifeShared", "AlbumData2.xml") if os.path.exists(album_xml_file2): if verbose: su.pout("Reading event and album data from %s..." % (album_xml_file2)) album_xml2 = applexml.read_applexml(album_xml_file2) data = IPhotoData(album_xml, album_xml2, ratings, is_aperture, aperture_data) if is_aperture: if (not data.applicationVersion.startswith('3.') and not data.applicationVersion.startswith('9.')): raise ValueError, "Aperture version %s not supported" % ( data.applicationVersion) else: if (not data.applicationVersion.startswith("9.") and not data.applicationVersion.startswith("8.") and not data.applicationVersion.startswith("7.") and not data.applicationVersion.startswith("6.")): raise ValueError, "iPhoto version %s not supported" % ( data.applicationVersion) #if not data.has_comments(): # raise ValueError, """Did not find any comments in the iPhoto database. #iPhoto sometimes writes the database incorrectly. Please restart iPhoto to reset. #""" return data
def check_photos(self): """Attempts to verify that the data are not corrupt by checking the "Photos" album against the image list. """ photos = None for album in self.albums.values(): if album.master: photos = album break if not photos: su.pout("No Photos album in library.") return # Check size of Photos album vs. Master Image List if photos.size != len(self.images_by_id): su.pout( "Warning: Master image list has %d images, but Photos album has %d images." % (len(self.images_by_id), photos.size)) # Cross check Photos vs. Master Image List photos_ids = {} for photo in photos.images: photos_ids[ photo. id] = photo # Make a map of Photos by id for the second phase below if not self.images_by_id.has_key(photo.id): su.pout( "Warning: only in Photos album, but not in Master Image List: %s" % (photo.caption)) print photo for image in self.images: if not photos_ids.has_key(image.id): su.pout( "Warning: only in Master Image List, but not in Photos album: %s" % (image.caption)) print image
def _generate_original(self, options):
    """Exports the original file.

    Decides whether the exported original is stale (inode mismatch in
    link mode, newer source mtime, or size drift), then copies or links
    the source and refreshes IPTC data as requested.

    Args:
      options: processing options (dryrun, link, iptc, iptc_masters, ...).
    """
    do_original_export = False
    export_dir = os.path.split(self.original_export_file)[0]
    if not os.path.exists(export_dir):
        su.pout("Creating folder " + export_dir)
        if not options.dryrun:
            os.mkdir(export_dir)
    # Resolve Finder aliases so all checks below run against the real file.
    original_source_file = su.resolve_alias(self.photo.originalpath)
    if os.path.exists(self.original_export_file):
        # In link mode, check the inode.
        if options.link:
            export_stat = os.stat(self.original_export_file)
            source_stat = os.stat(original_source_file)
            if export_stat.st_ino != source_stat.st_ino:
                su.pout('Changed: %s: inodes don\'t match: %d vs. %d'
                        % (self.original_export_file, export_stat.st_ino,
                           source_stat.st_ino))
                do_original_export = True
        # _MTIME_FUDGE absorbs small timestamp differences (e.g. coarse
        # filesystem mtime resolution).
        if (os.path.getmtime(self.original_export_file) + _MTIME_FUDGE <
            os.path.getmtime(original_source_file)):
            su.pout('Changed: %s: newer version is available: %s vs. %s'
                    % (self.original_export_file,
                       time.ctime(os.path.getmtime(self.original_export_file)),
                       time.ctime(os.path.getmtime(original_source_file))))
            do_original_export = True
        elif not self.size:
            # Only compare file sizes when not resizing on export; allow a
            # small difference (_MAX_FILE_DIFF) for metadata edits made in
            # the exported copy. In link mode any difference counts.
            source_size = os.path.getsize(original_source_file)
            export_size = os.path.getsize(self.original_export_file)
            diff = abs(source_size - export_size)
            if diff > _MAX_FILE_DIFF or (diff > 0 and options.link):
                su.pout(u'Changed: %s: file size: %d vs. %d'
                        % (self.original_export_file, export_size,
                           source_size))
                do_original_export = True
    else:
        # No exported copy yet — always export.
        do_original_export = True
    # options.iptc: 1 = update IPTC only for files being exported,
    # 2 = always update IPTC.
    do_iptc = (options.iptc == 1 and do_original_export) or options.iptc == 2
    if do_iptc and (options.link or options.iptc_masters):
        # Update IPTC in the source master itself (needed in link mode,
        # where the export and the source share the same file).
        if self.check_iptc_data(original_source_file, options,
                                is_original=True,
                                file_updated=do_original_export):
            do_original_export = True
    exists = True  # True if the file exists or was updated.
    if do_original_export:
        exists = imageutils.copy_or_link_file(original_source_file,
                                              self.original_export_file,
                                              options.dryrun,
                                              options.link,
                                              self.size,
                                              options)
    else:
        _logger.debug(u'%s up to date.', self.original_export_file)
    if exists and do_iptc and not options.link:
        # Not link mode: safe to rewrite IPTC in the exported copy.
        self.check_iptc_data(self.original_export_file, options,
                             is_original=True,
                             file_updated=do_original_export)
def get_iptc_data(self, image_path):
    """Gets IptcData for an image, either from cache, or by running
       exiftool. Returns an empty IptcData object if iptc data could not
       be read from file."""
    entry = self.cache.get(image_path)
    # A stale entry is treated the same as a missing one.
    if entry and not entry.is_current():
        entry = None
    if not entry:
        su.pout(u"Running exiftool for %s" % (image_path))
        entry = _IptcCacheEntry(image_path,
                                _get_iptc_data_exiftool(image_path))
        self.cache[image_path] = entry
        self._save()
    return entry.get_iptc_data()
def check_photos(self): """Attempts to verify that the data are not corrupt by checking the "Photos" album against the image list. """ photos = None for album in self.albums.values(): if album.master: photos = album break if not photos: su.pout("No Photos album in library.") return # Check size of Photos album vs. Master Image List if photos.size != len(self.images_by_id): su.pout("Warning: Master image list has %d images, but Photos album has %d images." % ( len(self.images_by_id), photos.size)) # Cross check Photos vs. Master Image List photos_ids = {} for photo in photos.images: photos_ids[photo.id] = photo # Make a map of Photos by id for the second phase below if not self.images_by_id.has_key(photo.id): su.pout("Warning: only in Photos album, but not in Master Image List: %s" % ( photo.caption)) print photo for image in self.images: if not photos_ids.has_key(image.id): su.pout("Warning: only in Master Image List, but not in Photos album: %s" % ( image.caption)) print image
def get_iptc_data(self, image_path):
    """Gets IptcData for an image, either from cache, or by running
       exiftool. Returns an empty IptcData object if iptc data could not
       be read from file."""
    entry = self.cache.get(image_path)
    # A stale entry is treated the same as a missing one.
    if entry and not entry.is_current():
        entry = None
    if not entry:
        su.pout(u"Running exiftool for %s" % (image_path))
        entry = _IptcCacheEntry(image_path,
                                _get_iptc_data_exiftool(image_path))
        self.cache[image_path] = entry
        self._save()
    return entry.get_iptc_data()
def format_album_name(album, name, folder_template):
    """Formats a folder name using a template.

    Args:
      album - an IPhotoContainer.
      name - name of the album (typically from album.album_name)
      folder_template - a format string.

    Returns:
      The formatted folder name, or folder_template unchanged if it
      references an unrecognized field.
    """
    if name is None:
        name = ''
    ascii_name = name.encode('ascii', 'replace')
    plain_name = ascii_name.replace(' ', '')
    clear_name = _CLEAR_TEXT_PATTERN.sub("_", name)
    nodate_name = name
    match = re.match(_YEAR_PATTERN_INDEX, name)
    if match:
        nodate_name = match.group(2)
    if album.date:
        year = str(album.date.year)
        month = str(album.date.month).zfill(2)
        day = str(album.date.day).zfill(2)
    else:
        year = ''
        month = ''
        day = ''
    folderhint = album.getfolderhint()
    if not folderhint:
        folderhint = ''
    try:
        return folder_template.format(
            album=name, name=name, ascii_name=ascii_name,
            plain_name=plain_name, nodate_album=nodate_name,
            hint=folderhint, yyyy=year, mm=month, dd=day,
            clear_name=clear_name, id=album.albumid)
    except KeyError as ex:
        # Fixed: modern "as" except syntax (valid in Python 2.6+ and 3.x),
        # and the help message now includes the album and nodate_album
        # fields that were missing from the list.
        su.pout(u'Unrecognized field in folder template: %s. Use one of: id, album, name, '
                'ascii_name, plain_name, clear_name, nodate_album, hint, yyyy, mm, dd.'
                % (str(ex)))
        return folder_template
def generate_files(self, client, options):
    """Generates the files in the export location.

    Args:
      client: Google Photos service client used for uploads.
      options: processing options (dryrun etc.).
    """
    for f in sorted(self.files):
        # In dryrun mode, an online_album might not exist
        if self.online_album:
            try:
                self.files[f].generate(client,
                                       self.online_album.gphoto_id.text,
                                       options)
            # Fixed: modern "as" except syntax (valid in Python 2.6+ and
            # 3.x). A failed upload is reported but does not stop the
            # remaining files.
            except gdata.photos.service.GooglePhotosException as e:
                print >> sys.stderr, 'Failed to upload %s: %s' % (
                    self.files[f].export_file, str(e))
        else:
            su.pout(u"Skipping files for %s because online album does not exist." % (
                self.name))
def generate_files(self, client, options):
    """Generates the files in the export location.

    Args:
      client: Google Photos service client used for uploads.
      options: processing options (dryrun etc.).
    """
    for f in sorted(self.files):
        # In dryrun mode, an online_album might not exist
        if self.online_album:
            try:
                self.files[f].generate(client,
                                       self.online_album.gphoto_id.text,
                                       options)
            # Fixed: modern "as" except syntax (valid in Python 2.6+ and
            # 3.x). A failed upload is reported but does not stop the
            # remaining files.
            except gdata.photos.service.GooglePhotosException as e:
                print >> sys.stderr, 'Failed to upload %s: %s' % (
                    self.files[f].export_file, str(e))
        else:
            su.pout(
                u"Skipping files for %s because online album does not exist."
                % (self.name))
def _generate_original(self, options):
    """Exports the original file.

    Decides whether the exported original is stale (inode mismatch in
    link mode, newer source mtime, or size drift), then copies or links
    the source and refreshes IPTC data as requested.

    Args:
      options: processing options (dryrun, link, iptc, iptc_masters, ...).
    """
    do_original_export = False
    export_dir = os.path.split(self.original_export_file)[0]
    if not os.path.exists(export_dir):
        su.pout("Creating folder " + export_dir)
        if not options.dryrun:
            os.mkdir(export_dir)
    # Resolve Finder aliases so all checks below run against the real file.
    original_source_file = su.resolve_alias(self.photo.originalpath)
    if os.path.exists(self.original_export_file):
        # In link mode, check the inode.
        if options.link:
            export_stat = os.stat(self.original_export_file)
            source_stat = os.stat(original_source_file)
            if export_stat.st_ino != source_stat.st_ino:
                su.pout('Changed: %s: inodes don\'t match: %d vs. %d'
                        % (self.original_export_file, export_stat.st_ino,
                           source_stat.st_ino))
                do_original_export = True
        # _MTIME_FUDGE absorbs small timestamp differences (e.g. coarse
        # filesystem mtime resolution).
        if (os.path.getmtime(self.original_export_file) + _MTIME_FUDGE <
            os.path.getmtime(original_source_file)):
            su.pout('Changed: %s: newer version is available: %s vs. %s' %
                    (self.original_export_file,
                     time.ctime(os.path.getmtime(
                         self.original_export_file)),
                     time.ctime(os.path.getmtime(original_source_file))))
            do_original_export = True
        elif not self.size:
            # Only compare file sizes when not resizing on export; allow a
            # small difference (_MAX_FILE_DIFF) for metadata edits made in
            # the exported copy. In link mode any difference counts.
            source_size = os.path.getsize(original_source_file)
            export_size = os.path.getsize(self.original_export_file)
            diff = abs(source_size - export_size)
            if diff > _MAX_FILE_DIFF or (diff > 0 and options.link):
                su.pout(u'Changed: %s: file size: %d vs. %d' %
                        (self.original_export_file, export_size,
                         source_size))
                do_original_export = True
    else:
        # No exported copy yet — always export.
        do_original_export = True
    # options.iptc: 1 = update IPTC only for files being exported,
    # 2 = always update IPTC.
    do_iptc = (options.iptc == 1 and do_original_export) or options.iptc == 2
    if do_iptc and (options.link or options.iptc_masters):
        # Update IPTC in the source master itself (needed in link mode,
        # where the export and the source share the same file).
        if self.check_iptc_data(original_source_file, options,
                                is_original=True,
                                file_updated=do_original_export):
            do_original_export = True
    exists = True  # True if the file exists or was updated.
    if do_original_export:
        exists = imageutils.copy_or_link_file(original_source_file,
                                              self.original_export_file,
                                              options.dryrun, options.link,
                                              self.size, options)
    else:
        _logger.debug(u'%s up to date.', self.original_export_file)
    if exists and do_iptc and not options.link:
        # Not link mode: safe to rewrite IPTC in the exported copy.
        self.check_iptc_data(self.original_export_file, options,
                             is_original=True,
                             file_updated=do_original_export)
def find_aperture_original(self):
    """Attempts to locate the Aperture Master image. Works only for .jpg
       masters that are stored in the Aperture library. Saves the result
       as originalpath."""
    master_path = _get_aperture_master_path(self.image_path)
    if not os.path.exists(master_path):
        return
    basename = su.getfilebasename(self.image_path)
    # Fast path: a .jpg master sitting right in the masters folder.
    candidate = os.path.join(master_path, basename + '.jpg')
    if os.path.exists(candidate):
        self.originalpath = candidate
        return
    # Slow path: search the masters folder for any matching extension.
    found = self._search_for_file(master_path, basename + '.')
    if found:
        self.originalpath = found
        return
    su.pout(u"No master for %s" % (self.image_path))
def find_aperture_original(self):
    """Attempts to locate the Aperture Master image. Works only for .jpg
       masters that are stored in the Aperture library. Saves the result
       as originalpath."""
    master_path = _get_aperture_master_path(self.image_path)
    if not os.path.exists(master_path):
        return
    basename = su.getfilebasename(self.image_path)
    # Fast path: a .jpg master sitting right in the masters folder.
    candidate = os.path.join(master_path, basename + '.jpg')
    if os.path.exists(candidate):
        self.originalpath = candidate
        return
    # Slow path: search the masters folder for any matching extension.
    found = self._search_for_file(master_path, basename + '.')
    if found:
        self.originalpath = found
        return
    su.pout(u"No master for %s" % (self.image_path))
def format_album_name(album, name, folder_template):
    """Formats a folder name using a template.

    Args:
      album - an IPhotoContainer.
      name - name of the album (typically from album.album_name)
      folder_template - a format string.

    Returns:
      The formatted folder name, or folder_template unchanged if it
      references an unrecognized field.
    """
    if name is None:
        name = ''
    ascii_name = name.encode('ascii', 'replace')
    plain_name = ascii_name.replace(' ', '')
    nodate_name = name
    match = re.match(_YEAR_PATTERN_INDEX, name)
    if match:
        nodate_name = match.group(2)
    if album.date:
        year = str(album.date.year)
        month = str(album.date.month).zfill(2)
        day = str(album.date.day).zfill(2)
    else:
        year = ''
        month = ''
        day = ''
    folderhint = album.getfolderhint()
    if not folderhint:
        folderhint = ''
    try:
        return folder_template.format(album=name, name=name,
                                      ascii_name=ascii_name,
                                      plain_name=plain_name,
                                      nodate_album=nodate_name,
                                      hint=folderhint,
                                      yyyy=year, mm=month, dd=day)
    except KeyError as ex:
        # Fixed: modern "as" except syntax (valid in Python 2.6+ and 3.x),
        # and the help message now includes the album and nodate_album
        # fields that were missing from the list.
        su.pout(
            u'Unrecognized field in folder template: %s. Use one of: album, name, '
            'ascii_name, plain_name, nodate_album, hint, yyyy, mm, dd.' % (str(ex)))
        return folder_template
def get_iphoto_data(album_xml_file, ratings=None, verbose=False, aperture=False): """reads the iPhoto database and converts it into an iPhotoData object.""" library_dir = os.path.dirname(album_xml_file) is_aperture = aperture or album_xml_file.endswith('ApertureData.xml') if verbose: print "Reading %s database from %s..." % ( 'Aperture' if is_aperture else 'iPhoto', album_xml_file) album_xml = applexml.read_applexml_fixed(album_xml_file) album_xml2 = None if is_aperture: try: import appledata.aperturedata as aperturedata aperture_data = aperturedata.get_aperture_data(library_dir, verbose) except ImportError: aperture_data = None else: aperture_data = None # Recent iPhoto versions write event and album data into # iLifeShared/AlbumData2.xml. album_xml_file2 = os.path.join(os.path.split(album_xml_file)[0], "iLifeShared", "AlbumData2.xml") if os.path.exists(album_xml_file2): if verbose: su.pout("Reading event and album data from %s..." % (album_xml_file2)) album_xml2 = applexml.read_applexml(album_xml_file2) data = IPhotoData(album_xml, album_xml2, ratings, is_aperture, aperture_data) if is_aperture: if (not data.applicationVersion.startswith('3.') and not data.applicationVersion.startswith('9.')): raise ValueError, "Aperture version %s not supported" % ( data.applicationVersion) else: if (not data.applicationVersion.startswith("9.") and not data.applicationVersion.startswith("8.") and not data.applicationVersion.startswith("7.") and not data.applicationVersion.startswith("6.")): raise ValueError, "iPhoto version %s not supported" % ( data.applicationVersion) #if not data.has_comments(): # raise ValueError, """Did not find any comments in the iPhoto database. #iPhoto sometimes writes the database incorrectly. Please restart iPhoto to reset. #""" return data
def print_summary(self):
    """Prints a one-line summary (image count/size, original count/size,
    face count) for an Event album; no-op for other album types."""
    if self.albumtype != "Event":
        return
    original_count = 0
    file_size = 0
    original_size = 0
    face_count = 0
    for image in self.images:
        face_count += len(image.getfaces())
        if image.originalpath:
            original_count += 1
            if os.path.exists(image.originalpath):
                original_size += os.path.getsize(image.originalpath)
        if os.path.exists(image.image_path):
            file_size += os.path.getsize(image.image_path)
            # NOTE(review): nesting reconstructed from flattened source —
            # when an image has no separate original, its main file appears
            # to be counted toward original_size as well; confirm this
            # guard belongs inside the exists() check.
            if not image.originalpath:
                original_size += os.path.getsize(image.image_path)
    # Convert byte totals to megabytes for display.
    file_size = file_size / 1024.0 / 1024.0
    original_size = original_size / 1024.0 / 1024.0
    su.pout(u"%-50s %4d images (%6.1f MB), %3d originals (%6.1f MB), %3d faces" % (
        self.tostring(), len(self.images), file_size, original_count,
        original_size, face_count))
def load_album(self, options):
    """Walks the album directory tree, and scans it for existing files.

    Creates the album directory if needed (honoring dryrun), then removes
    every file or sub-directory that no longer maps to a master image.
    """
    if not os.path.exists(self.albumdirectory):
        su.pout("Creating folder " + self.albumdirectory)
        if options.dryrun:
            return
        os.makedirs(self.albumdirectory)
    file_list = os.listdir(self.albumdirectory)
    if file_list is None:
        return
    for entry in sorted(file_list):
        # we won't touch some files
        if imageutils.is_ignore(entry):
            continue
        entry_path = unicodedata.normalize(
            "NFC", os.path.join(self.albumdirectory, entry))
        if os.path.isdir(entry_path):
            is_originals_dir = options.originals and (
                entry == "Originals" or
                (options.picasa and entry == ".picasaoriginals"))
            if is_originals_dir:
                self.scan_originals(entry_path, options)
            else:
                delete_album_file(entry_path, self.albumdirectory,
                                  "Obsolete export directory", options)
            continue
        base_name = unicodedata.normalize("NFC",
                                          su.getfilebasename(entry_path))
        master_file = self.files.get(base_name.lower())
        # everything else must have a master, or will have to go
        if master_file is None or not master_file.is_part_of(entry_path):
            delete_album_file(entry_path, self.albumdirectory,
                              "Obsolete exported file", options)
def load_album(self, options):
    """Walks the album directory tree, and scans it for existing files.

    Creates the album directory if needed (honoring dryrun), then removes
    every file or sub-directory that no longer maps to a master image.
    """
    if not os.path.exists(self.albumdirectory):
        su.pout("Creating folder " + self.albumdirectory)
        if options.dryrun:
            return
        os.makedirs(self.albumdirectory)
    file_list = os.listdir(self.albumdirectory)
    if file_list is None:
        return
    for entry in sorted(file_list):
        # we won't touch some files
        if imageutils.is_ignore(entry):
            continue
        entry_path = unicodedata.normalize(
            "NFC", os.path.join(self.albumdirectory, entry))
        if os.path.isdir(entry_path):
            is_originals_dir = options.originals and (
                entry == "Originals" or
                (options.picasa and entry == ".picasaoriginals"))
            if is_originals_dir:
                self.scan_originals(entry_path, options)
            else:
                delete_album_file(entry_path, self.albumdirectory,
                                  "Obsolete export directory", options)
            continue
        base_name = unicodedata.normalize("NFC",
                                          su.getfilebasename(entry_path))
        master_file = self.files.get(base_name.lower())
        # everything else must have a master, or will have to go
        if master_file is None or not master_file.is_part_of(entry_path):
            delete_album_file(entry_path, self.albumdirectory,
                              "Obsolete exported file", options)
def _check_need_to_export(self, source_file, options):
    """Returns true if the image file needs to be exported.

    Args:
      source_file: path to image file, with aliases resolved.
      options: processing options.
    """
    if not os.path.exists(self.export_file):
        return True
    export_mtime = os.path.getmtime(self.export_file)
    if export_mtime + _MTIME_FUDGE < os.path.getmtime(source_file):
        su.pout('Changed: %s: newer version is available: %s vs. %s' %
                (self.export_file, time.ctime(export_mtime),
                 time.ctime(os.path.getmtime(source_file))))
        return True
    if options.size:
        return False
    # With creative renaming in iPhoto it is possible to get stale files
    # if titles get swapped between images. Double check the size,
    # allowing for some difference for meta data changes made in the
    # exported copy.
    source_size = os.path.getsize(source_file)
    export_size = os.path.getsize(self.export_file)
    size_diff = abs(source_size - export_size)
    if size_diff > _MAX_FILE_DIFF or (size_diff > 32 and options.link):
        su.pout('Changed: %s: file size: %d vs. %d' %
                (self.export_file, export_size, source_size))
        return True
    # In link mode there is no need to check the modification date in the
    # database; the size check above already catches those changes.
    return False
def _check_need_to_export(self, source_file, options):
    """Returns true if the image file needs to be exported.

    Args:
      source_file: path to image file, with aliases resolved.
      options: processing options.
    """
    if not os.path.exists(self.export_file):
        return True
    export_mtime = os.path.getmtime(self.export_file)
    if export_mtime + _MTIME_FUDGE < os.path.getmtime(source_file):
        su.pout('Changed: %s: newer version is available: %s vs. %s' %
                (self.export_file, time.ctime(export_mtime),
                 time.ctime(os.path.getmtime(source_file))))
        return True
    if options.size:
        return False
    # With creative renaming in iPhoto it is possible to get stale files
    # if titles get swapped between images. Double check the size,
    # allowing for some difference for meta data changes made in the
    # exported copy.
    source_size = os.path.getsize(source_file)
    export_size = os.path.getsize(self.export_file)
    size_diff = abs(source_size - export_size)
    if size_diff > _MAX_FILE_DIFF or (size_diff > 32 and options.link):
        su.pout('Changed: %s: file size: %d vs. %d' %
                (self.export_file, export_size, source_size))
        return True
    # In link mode there is no need to check the modification date in the
    # database; the size check above already catches those changes.
    return False
def print_summary(self):
    """Prints a one-line summary (image count/size, original count/size,
    face count) for an Event album; no-op for other album types."""
    if self.albumtype != "Event":
        return
    original_count = 0
    file_size = 0
    original_size = 0
    face_count = 0
    for image in self.images:
        face_count += len(image.getfaces())
        if image.originalpath:
            original_count += 1
            if os.path.exists(image.originalpath):
                original_size += os.path.getsize(image.originalpath)
        if os.path.exists(image.image_path):
            file_size += os.path.getsize(image.image_path)
            # NOTE(review): nesting reconstructed from flattened source —
            # when an image has no separate original, its main file appears
            # to be counted toward original_size as well; confirm this
            # guard belongs inside the exists() check.
            if not image.originalpath:
                original_size += os.path.getsize(image.image_path)
    # Convert byte totals to megabytes for display.
    file_size = file_size / 1024.0 / 1024.0
    original_size = original_size / 1024.0 / 1024.0
    su.pout(
        u"%-50s %4d images (%6.1f MB), %3d originals (%6.1f MB), %3d faces"
        % (self.tostring(), len(self.images), file_size, original_count,
           original_size, face_count))
def _check_person_iptc_data(self, export_file, region_rectangles,
                            region_names, do_faces):
    """Tests if the person names or regions in the export file need to be
       updated.

    Args:
      export_file: path of the exported image (used for messages only).
      region_rectangles: face region rectangles currently in the file.
      region_names: person names currently in the file.
      do_faces: if False, compare against empty face data.

    Returns:
      (new_rectangles, new_persons), or (None, None) if no update is
      needed.
    """
    if do_faces:
        photo_rectangles = self.get_photo_rectangles()
        photo_faces = self.photo.faces
    else:
        photo_rectangles = []
        photo_faces = []
    combined_region_names = ','.join(region_names)
    combined_photo_faces = ','.join(photo_faces)
    if combined_region_names != combined_photo_faces:
        su.pout('Updating IPTC for %s because of persons (%s instead of %s)'
                % (export_file, combined_region_names, combined_photo_faces))
        return (photo_rectangles, photo_faces)
    if len(region_rectangles) != len(photo_rectangles):
        su.pout('Updating IPTC for %s because of number of regions '
                '(%d vs %d)' % (export_file, len(region_rectangles),
                                len(photo_rectangles)))
        return (photo_rectangles, photo_faces)
    # Rectangle list lengths match here, so zip covers every region pair.
    # (Replaces the Python 2 xrange index loop; also avoids an IndexError
    # if region_names is ever shorter than region_rectangles.)
    for name, have_rect, want_rect in zip(region_names, region_rectangles,
                                          photo_rectangles):
        if not region_matches(have_rect, want_rect):
            su.pout('Updating IPTC for %s because of region for %s '
                    '(%s vs %s)' % (export_file, name,
                                    ','.join(str(c) for c in have_rect),
                                    ','.join(str(c) for c in want_rect)))
            return (photo_rectangles, photo_faces)
    return (None, None)
def _check_person_iptc_data(self, export_file, region_rectangles,
                            region_names, do_faces):
    """Tests if the person names or regions in the export file need to be
       updated.

    Args:
      export_file: path of the exported image (used for messages only).
      region_rectangles: face region rectangles currently in the file.
      region_names: person names currently in the file.
      do_faces: if False, compare against empty face data.

    Returns:
      (new_rectangles, new_persons), or (None, None) if no update is
      needed.
    """
    if do_faces:
        photo_rectangles = self.get_photo_rectangles()
        photo_faces = self.photo.faces
    else:
        photo_rectangles = []
        photo_faces = []
    combined_region_names = ','.join(region_names)
    combined_photo_faces = ','.join(photo_faces)
    if combined_region_names != combined_photo_faces:
        su.pout(
            'Updating IPTC for %s because of persons (%s instead of %s)' %
            (export_file, combined_region_names, combined_photo_faces))
        return (photo_rectangles, photo_faces)
    if len(region_rectangles) != len(photo_rectangles):
        su.pout(
            'Updating IPTC for %s because of number of regions '
            '(%d vs %d)' % (export_file, len(region_rectangles),
                            len(photo_rectangles)))
        return (photo_rectangles, photo_faces)
    # Rectangle list lengths match here, so zip covers every region pair.
    # (Replaces the Python 2 xrange index loop; also avoids an IndexError
    # if region_names is ever shorter than region_rectangles.)
    for name, have_rect, want_rect in zip(region_names, region_rectangles,
                                          photo_rectangles):
        if not region_matches(have_rect, want_rect):
            su.pout('Updating IPTC for %s because of region for %s '
                    '(%s vs %s)' % (export_file, name,
                                    ','.join(str(c) for c in have_rect),
                                    ','.join(str(c) for c in want_rect)))
            return (photo_rectangles, photo_faces)
    return (None, None)
def process_albums(self, albums, album_types, folder_prefix, includes, excludes, options, matched=False): """Walks trough an iPhoto album tree, and discovers albums (directories).""" include_pattern = re.compile(su.unicode_string(includes)) exclude_pattern = None if excludes: exclude_pattern = re.compile(su.unicode_string(excludes)) # Figure out the folder patterns (if any) folderpatterns = [] if options.folderpatterns: for pattern in su.unicode_string(options.folderpatterns).split(','): (expression, folder) = pattern.split('/', 2) folderpatterns.append((re.compile(expression), folder)) # first, do the sub-albums for sub_album in albums: if self._check_abort(): return sub_name = sub_album.name if not sub_name: print "Found an album with no name: " + sub_album.albumid sub_name = "xxx" # check the album type if sub_album.albumtype == "Folder" or sub_album.albums: sub_matched = matched if include_pattern.match(sub_name): sub_matched = True new_name = folder_prefix if sub_album.albumtype == "Folder": new_name += imageutils.make_foldername(sub_name) + "/" self.process_albums(sub_album.albums, album_types, new_name, includes, excludes, options, sub_matched) continue elif (sub_album.albumtype == "None" or not sub_album.albumtype in album_types): # print "Ignoring " + sub_album.name + " of type " + \ # sub_album.albumtype continue if not matched and not include_pattern.match(sub_name): _logger.debug(u'Skipping "%s" because it does not match pattern.', sub_name) continue if exclude_pattern and exclude_pattern.match(sub_name): _logger.debug(u'Skipping "%s" because it is excluded.', sub_name) continue _logger.debug(u'Loading "%s".', sub_name) folder_hint = None if sub_name.find('/') != -1: (folder_hint, sub_name) = sub_name.split('/', 1) if not folder_hint and options.folderhints: folder_hint = sub_album.getfolderhint() if not folder_hint and folderpatterns: for (pattern, folder) in folderpatterns: if pattern.match(sub_album.name): if options.verbose: su.pout("Using folder %s 
for album %s." % (folder, sub_album.name)) folder_hint = folder break prefix = folder_prefix if folder_hint is not None: prefix = prefix + imageutils.make_foldername(folder_hint) + "/" formatted_name = imageutils.format_album_name( sub_album, sub_name, options.foldertemplate) sub_name = prefix + imageutils.make_foldername(formatted_name, options.enablesubfolders) sub_name = self._find_unused_folder(sub_name) # first, do the sub-albums self.process_albums(sub_album.albums, album_types, folder_prefix, includes, excludes, options, matched) # now the album itself picture_directory = ExportDirectory( sub_name, sub_album, os.path.join(self.albumdirectory, sub_name)) if picture_directory.add_iphoto_images(sub_album.images, options) > 0: self.named_folders[sub_name] = picture_directory return len(self.named_folders)
def check_iptc_data(self, export_file, options, is_original=False,
                    file_updated=False):
    """Tests if a file has the proper keywords and caption in the meta
    data, and rewrites the IPTC data if not (unless in dry-run mode).

    Args:
        export_file: path to the exported image file.
        options: processing options.
        is_original: True if export_file is an original image; face
            regions are not written into originals (could be cropped).
        file_updated: True if the file was just (re)written, in which
            case the IPTC update is applied unconditionally.

    Returns:
        True if the file's IPTC data was (or would be) updated.
    """
    # Only file types that exiftool can process are considered.
    if not su.getfileextension(export_file) in _EXIF_EXTENSIONS:
        return False
    messages = []  # human-readable list of detected differences
    iptc_data = exiftool.get_iptc_data(export_file)
    new_caption = imageutils.get_photo_caption(self.photo, self.container,
                                               options.captiontemplate)
    if not su.equalscontent(iptc_data.caption, new_caption):
        messages.append(u' File caption: %s' %
                        (su.nn_string(iptc_data.caption).strip()))
        messages.append(u' iPhoto caption: %s' % (new_caption))
    else:
        new_caption = None  # None signals: no caption update needed
    new_keywords = None
    new_date = None
    new_rating = -1
    new_keywords = self.get_export_keywords(options.face_keywords)
    if not imageutils.compare_keywords(new_keywords, iptc_data.keywords):
        messages.append(u' File keywords: %s' %
                        (u','.join(iptc_data.keywords)))
        if new_keywords == None:
            messages.append(u' iPhoto keywords: <None>')
        else:
            messages.append(u' iPhoto keywords: %s' %
                            (u','.join(new_keywords)))
    else:
        new_keywords = None
    if not options.aperture:
        #if self.photo.date and date_time_original != self.photo.date:
        #    messages.append(u' File date: %s' % (date_time_original))
        #    messages.append(u' iPhoto date: %s' % (self.photo.date))
        #    new_date = self.photo.date
        if self.photo.rating != None and iptc_data.rating != self.photo.rating:
            messages.append(u' File rating: %d' % (iptc_data.rating))
            messages.append(u' iPhoto rating: %d' % (self.photo.rating))
            new_rating = self.photo.rating
    else:
        # In Aperture mode, merge the face keywords into the keywords
        # already in the file instead of replacing them.
        if options.face_keywords:
            merged_keywords = iptc_data.keywords[:]
            for keyword in self.photo.getfaces():
                if not keyword in merged_keywords:
                    merged_keywords.append(keyword)
            new_keywords = merged_keywords
    # Hierarchical subjects get cleared on export (except in reverse mode).
    if iptc_data.hierarchical_subject and not options.reverse:
        messages.append(u' File subjects: %s' %
                        (u','.join(iptc_data.hierarchical_subject)))
    new_gps = None
    if options.gps and self.photo.gps:
        if (not iptc_data.gps or not self.photo.gps.is_same(iptc_data.gps)):
            if iptc_data.gps:
                old_gps = iptc_data.gps
            else:
                old_gps = imageutils.GpsLocation()
            messages.append(u' File GPS: %s' % (old_gps.to_string()))
            messages.append(u' iPhoto GPS: %s' % (self.photo.gps.to_string()))
            new_gps = self.photo.gps
    # Don't export the faces into the original file (could have been
    # cropped).
    do_faces = options.faces and not is_original
    # NOTE(review): this call passes 5 arguments, but another version of
    # _check_person_iptc_data in this code base accepts only 4 (no
    # `messages` parameter) - confirm which signature is in scope here.
    (new_rectangles, new_persons) = self._check_person_iptc_data(
        export_file, iptc_data.region_rectangles, iptc_data.region_names,
        do_faces, messages)
    if (new_caption != None or new_keywords != None or new_date or
        (not options.reverse and iptc_data.hierarchical_subject) or
        new_gps or new_rating != -1 or new_rectangles != None or
        new_persons != None):
        su.pout(u'Updating IPTC for %s because of\n%s' %
                (export_file, u'\n'.join(messages)))
        if (file_updated or
            imageutils.should_update(options)) and not options.dryrun:
            # hierarchical_subject=[] clears any hierarchical subjects.
            exiftool.update_iptcdata(export_file, new_caption, new_keywords,
                                     new_date, new_rating, new_gps,
                                     new_rectangles, new_persons,
                                     iptc_data.image_width,
                                     iptc_data.image_height,
                                     hierarchical_subject=[])
        return True
    return False
def check_iptc_data(self, export_file, options, is_original=False,
                    file_updated=False):
    """Tests if a file has the proper keywords and caption in the meta
    data, and rewrites the IPTC data if not (unless in dry-run mode).

    Args:
        export_file: path to the exported image file.
        options: processing options.
        is_original: True if export_file is an original image; face
            regions are not written into originals (could be cropped).
        file_updated: True if the file was just (re)written, in which
            case the IPTC update is applied unconditionally.

    Returns:
        True if the file's IPTC data was (or would be) updated.
    """
    # Only file types that exiftool can process are considered.
    if not su.getfileextension(export_file) in _EXIF_EXTENSIONS:
        return False
    messages = []  # human-readable list of detected differences
    iptc_data = exiftool.get_iptc_data(export_file)
    new_caption = imageutils.get_photo_caption(self.photo, self.container,
                                               options.captiontemplate)
    if not su.equalscontent(iptc_data.caption, new_caption):
        messages.append(u' File caption: %s' %
                        (su.nn_string(iptc_data.caption).strip()))
        messages.append(u' iPhoto caption: %s' % (new_caption))
    else:
        new_caption = None  # None signals: no caption update needed
    new_keywords = None
    new_date = None
    new_rating = -1
    new_keywords = self.get_export_keywords(options.face_keywords)
    if not imageutils.compare_keywords(new_keywords, iptc_data.keywords):
        messages.append(u' File keywords: %s' %
                        (u','.join(iptc_data.keywords)))
        if new_keywords == None:
            messages.append(u' iPhoto keywords: <None>')
        else:
            messages.append(u' iPhoto keywords: %s' %
                            (u','.join(new_keywords)))
    else:
        new_keywords = None
    if not options.aperture:
        #if self.photo.date and date_time_original != self.photo.date:
        #    messages.append(u' File date: %s' % (date_time_original))
        #    messages.append(u' iPhoto date: %s' % (self.photo.date))
        #    new_date = self.photo.date
        if self.photo.rating != None and iptc_data.rating != self.photo.rating:
            messages.append(u' File rating: %d' % (iptc_data.rating))
            messages.append(u' iPhoto rating: %d' % (self.photo.rating))
            new_rating = self.photo.rating
    else:
        # In Aperture mode, merge the face keywords into the keywords
        # already in the file instead of replacing them.
        if options.face_keywords:
            merged_keywords = iptc_data.keywords[:]
            for keyword in self.photo.getfaces():
                if not keyword in merged_keywords:
                    merged_keywords.append(keyword)
            new_keywords = merged_keywords
    # Hierarchical subjects get cleared on export (except in reverse mode).
    if iptc_data.hierarchical_subject and not options.reverse:
        messages.append(u' File subjects: %s' %
                        (u','.join(iptc_data.hierarchical_subject)))
    new_gps = None
    if options.gps and self.photo.gps:
        if (not iptc_data.gps or not self.photo.gps.is_same(iptc_data.gps)):
            if iptc_data.gps:
                old_gps = iptc_data.gps
            else:
                old_gps = imageutils.GpsLocation()
            messages.append(u' File GPS: %s' % (old_gps.to_string()))
            messages.append(u' iPhoto GPS: %s' % (self.photo.gps.to_string()))
            new_gps = self.photo.gps
    # Don't export the faces into the original file (could have been
    # cropped).
    do_faces = options.faces and not is_original
    # NOTE(review): this call passes 5 arguments, but another version of
    # _check_person_iptc_data in this code base accepts only 4 (no
    # `messages` parameter) - confirm which signature is in scope here.
    (new_rectangles, new_persons) = self._check_person_iptc_data(
        export_file, iptc_data.region_rectangles, iptc_data.region_names,
        do_faces, messages)
    if (new_caption != None or new_keywords != None or new_date or
        (not options.reverse and iptc_data.hierarchical_subject) or
        new_gps or new_rating != -1 or new_rectangles != None or
        new_persons != None):
        su.pout(u'Updating IPTC for %s because of\n%s' %
                (export_file, u'\n'.join(messages)))
        if (file_updated or
            imageutils.should_update(options)) and not options.dryrun:
            # hierarchical_subject=[] clears any hierarchical subjects.
            exiftool.update_iptcdata(export_file, new_caption, new_keywords,
                                     new_date, new_rating, new_gps,
                                     new_rectangles, new_persons,
                                     iptc_data.image_width,
                                     iptc_data.image_height,
                                     hierarchical_subject=[])
        return True
    return False
def __init__(self, key, data, keyword_map, face_map, aperture_data):
    """Builds an image object from an iPhoto AlbumData entry.

    Args:
        key: image id (the key in the AlbumData master image list).
        data: per-image dictionary from the AlbumData plist.
        keyword_map: maps keyword ids to keyword names.
        face_map: maps face keys to person names.
        aperture_data: optional Aperture library data; if present,
            missing values are filled in from the matching Aperture
            version.
    """
    self.id = key
    self.data = data
    self._caption = su.nn_string(data.get("Caption")).strip()
    self.comment = su.nn_string(data.get("Comment")).strip()
    version = None
    if aperture_data:
        version = aperture_data.versions.get(key)
    # Image date: plist value first, then Aperture, then the caption.
    if data.has_key("DateAsTimerInterval"):
        self.date = applexml.getappletime(data.get("DateAsTimerInterval"))
    elif version:
        self.date = version.image_date
    else:
        # Try to get the date from a the caption in "YYYYMMDD ..." format
        m = re.match(_CAPTION_PATTERN, self._caption)
        if m:
            year = int(m.group(1))
            month = int(m.group(2))
            if not month:
                month = 1  # a "00" month is treated as January
            date = int(m.group(3))
            if not date:
                date = 1  # a "00" day is treated as the 1st
            self.date = datetime.datetime(year, month, date)
        else:
            self.date = None
    self.mod_date = applexml.getappletime(
        data.get("ModDateAsTimerInterval"))
    self.image_path = data.get("ImagePath")
    # Rating: plist value first, then Aperture.
    if data.has_key("Rating"):
        self.rating = int(data.get("Rating"))
    elif version:
        self.rating = version.mainRating
    else:
        self.rating = None
    # GPS: plist value first, then Aperture. NOTE(review): only the
    # presence of "longitude" is checked before reading "latitude" -
    # assumes both are always set together; confirm.
    if data.get("longitude"):
        latitude = float(data.get("latitude"))
        longitude = float(data.get("longitude"))
        self.gps = imageutils.GpsLocation(latitude, longitude)
    elif version:
        self.gps = version.location
    else:
        self.gps = None
    self.keywords = []
    keyword_list = data.get("Keywords")
    if keyword_list is not None:
        for i in keyword_list:
            # keyword_map.get(i) can be None for unknown ids.
            self.keywords.append(keyword_map.get(i))
    elif version:
        self.keywords = version.keywords
    if version:
        self.originalpath = None
        # This is just a placeholder...
        # Use the preview if there are adjustments.
        if (version.rotation or version.hasAdjustments or
            not su.getfileextension(
                version.master_image_path) in _JPG_EXTENSIONS):
            #if version.rotation:
            #    su.pout(u"Rotated: %s (%d)" % (self._caption, version.rotation))
            #if version.hasAdjustments:
            #    su.pout(u"Adjustments: %s" % (self._caption))
            #if not su.getfileextension(version.master_image_path) in _JPG_EXTENSIONS:
            #    su.pout(u"Not JPEG: %s" % (self._caption))
            self.originalpath = version.master_image_path
            if not version.imageProxy.fullSizePreviewPath:
                su.pout(u"No preview path for %s." % (self.caption))
            else:
                self.image_path = version.imageProxy.fullSizePreviewPath
        else:
            self.image_path = version.master_image_path
            self.originalpath = None
        # NOTE(review): original formatting was ambiguous here - this
        # staleness warning may have been intended only for the branch
        # that uses the preview; confirm against the upstream source.
        if not version.imageProxy.fullSizePreviewUpToDate:
            su.pout(u"%s: full size preview not up to date." % (self.caption))
    else:
        self.originalpath = data.get("OriginalPath")
    self.roll = data.get("Roll")
    self.albums = []  # list of albums that this image belongs to
    self.faces = []
    self.face_rectangles = []
    self.event_name = ''  # name of event (roll) that this image belongs to
    self.event_index = ''  # index within event
    self.event_index0 = ''  # index with event, left padded with 0
    face_list = data.get("Faces")
    if face_list:
        for face_entry in face_list:
            face_key = face_entry.get("face key")
            face_name = face_map.get(face_key)
            if face_name:
                self.faces.append(face_name)
                # Rectangle is '{{x, y}, {width, height}}' as ratios,
                # referencing the lower left corner of the face rectangle,
                # with lower left corner of image as (0,0)
                rectangle = parse_face_rectangle(
                    face_entry.get("rectangle"))
                # Convert to using center of area, relative to upper left
                # corner of image
                rectangle[0] += rectangle[2] / 2.0
                rectangle[1] = max(0.0,
                                   1.0 - rectangle[1] - rectangle[3] / 2.0)
                self.face_rectangles.append(rectangle)
            # Other keys in face_entry: face index
    # Now sort the faces left to right.
    sorted_names = {}
    sorted_rectangles = {}
    for i in xrange(len(self.faces)):
        x = self.face_rectangles[i][0]
        # Nudge duplicate x positions so dict keys stay unique.
        while sorted_names.has_key(x):
            x += 0.00001
        sorted_names[x] = self.faces[i]
        sorted_rectangles[x] = self.face_rectangles[i]
    self.faces = [
        sorted_names[x] for x in sorted(sorted_names.keys())
    ]
    self.face_rectangles = [
        sorted_rectangles[x] for x in sorted(sorted_rectangles.keys())
    ]
def _check_need_to_export(self, source_file, options):
    """Returns true if the image file needs to be exported.

    Args:
        source_file: path to image file, with aliases resolved.
        options: processing options.

    Returns:
        True if the export copy is missing or out of date.
    """
    # A missing export copy always needs to be written.
    if not os.path.exists(self.export_file):
        return True
    # In link mode, check the inode.
    if options.link:
        export_stat = os.stat(self.export_file)
        source_stat = os.stat(source_file)
        if export_stat.st_ino != source_stat.st_ino:
            su.pout('Changed: %s: inodes don\'t match: %d vs. %d' %
                    (self.export_file, export_stat.st_ino,
                     source_stat.st_ino))
            return True
    # _MTIME_FUDGE absorbs small timestamp differences (e.g. file system
    # granularity) so near-identical times do not trigger a re-export.
    if (not options.reverse and
        os.path.getmtime(self.export_file) + _MTIME_FUDGE <
        os.path.getmtime(source_file)):
        su.pout('Changed: %s: newer version is available: %s vs. %s' %
                (self.export_file,
                 time.ctime(os.path.getmtime(self.export_file)),
                 time.ctime(os.path.getmtime(source_file))))
        return True
    # In reverse mode, the comparison goes the other way.
    if (options.reverse and
        os.path.getmtime(source_file) + _MTIME_FUDGE <
        os.path.getmtime(self.export_file)):
        su.pout('Changed: %s: newer version is available: %s vs. %s' %
                (self.export_file,
                 time.ctime(os.path.getmtime(source_file)),
                 time.ctime(os.path.getmtime(self.export_file))))
        return True
    if not self.size and not options.reverse:
        # With creative renaming in iPhoto it is possible to get
        # stale files if titles get swapped between images. Double
        # check the size, allowing for some difference for meta data
        # changes made in the exported copy
        source_size = os.path.getsize(source_file)
        export_size = os.path.getsize(self.export_file)
        diff = abs(source_size - export_size)
        if diff > _MAX_FILE_DIFF or (diff > 32 and options.link):
            su.pout('Changed: %s: file size: %d vs. %d' %
                    (self.export_file, export_size, source_size))
            return True
    # In reverse mode, we don't check the file size (might have changed
    # because of Preview regeneration), so we look at the image dimensions
    # instead to catch some out-of-sync images.
    if options.reverse and su.getfileextension(
            self.export_file) in _EXIF_EXTENSIONS:
        (source_width,
         source_height) = imageutils.get_image_width_height(source_file)
        (export_width,
         export_height) = imageutils.get_image_width_height(self.export_file)
        if ((source_width and export_width and
             source_width != export_width) or
            (source_height and export_height and
             source_height != export_height)):
            su.pout('Changed: %s: dimensions: %dx%d vs. %dx%d' % (
                self.export_file, source_width, source_height,
                export_width, export_height))
            return True
    # In link mode, we don't need to check the modification date in the
    # database because we catch the changes by the size check above.
    #if (not options.link and
    #    datetime.datetime.fromtimestamp(os.path.getmtime(
    #        self.export_file)) < self.photo.mod_date):
    #    su.pout('Changed: %s: modified in iPhoto: %s vs. %s ' % (
    #        self.export_file,
    #        time.ctime(os.path.getmtime(self.export_file)),
    #        self.photo.mod_date))
    #    return True
    return False
def process_albums(self, albums, album_types, folder_prefix, includes, excludes, options, matched=False): """Walks trough an iPhoto album tree, and discovers albums (directories).""" include_pattern = re.compile(su.unicode_string(includes)) exclude_pattern = None if excludes: exclude_pattern = re.compile(su.unicode_string(excludes)) # Figure out the folder patterns (if any) folderpatterns = [] if options.folderpatterns: for pattern in su.unicode_string(options.folderpatterns).split(','): (expression, folder) = pattern.split('/', 2) folderpatterns.append((re.compile(expression), folder)) # first, do the sub-albums for sub_album in albums: if self._check_abort(): return sub_name = sub_album.name if not sub_name: print "Found an album with no name: " + sub_album.albumid sub_name = "xxx" # check the album type if sub_album.albumtype == "Folder" or sub_album.albums: sub_matched = matched if include_pattern.match(sub_name): sub_matched = True new_name = folder_prefix if sub_album.albumtype == "Folder": new_name += imageutils.make_foldername(sub_name) + "/" self.process_albums(sub_album.albums, album_types, new_name, includes, excludes, options, sub_matched) continue elif (sub_album.albumtype == "None" or not sub_album.albumtype in album_types): # print "Ignoring " + sub_album.name + " of type " + \ # sub_album.albumtype continue if not matched and not include_pattern.match(sub_name): _logger.debug(u'Skipping "%s" because it does not match pattern.', sub_name) continue if exclude_pattern and exclude_pattern.match(sub_name): _logger.debug(u'Skipping "%s" because it is excluded.', sub_name) continue _logger.debug(u'Loading "%s".', sub_name) folder_hint = None if sub_name.find('/') != -1: (folder_hint, sub_name) = sub_name.split('/', 1) if not folder_hint and options.folderhints: folder_hint = sub_album.getfolderhint() if not folder_hint and folderpatterns: for (pattern, folder) in folderpatterns: if pattern.match(sub_album.name): if options.verbose: su.pout("Using folder %s 
for album %s." % (folder, sub_album.name)) folder_hint = folder break prefix = folder_prefix if folder_hint is not None: prefix = prefix + imageutils.make_foldername(folder_hint) + "/" formatted_name = imageutils.format_album_name( sub_album, sub_name, options.foldertemplate) sub_name = prefix + imageutils.make_foldername(formatted_name) sub_name = self._find_unused_folder(sub_name) # first, do the sub-albums self.process_albums(sub_album.albums, album_types, folder_prefix, includes, excludes, options, matched) # now the album itself picture_directory = ExportDirectory( sub_name, sub_album, os.path.join(self.albumdirectory, sub_name)) if picture_directory.add_iphoto_images(sub_album.images, options) > 0: self.named_folders[sub_name] = picture_directory return len(self.named_folders)
def get_photo_caption(photo, container, caption_template): """Gets the caption for a IPhotoImage photo, using a template. Supports: {caption} - the iPhoto caption (title). {description} - the iPhoto comment. {dated_caption_description} - the caption and comments from an IPhotoImage combined into a single string, nicely formatted like YYYY/MM/DD title: description. {folder_description} - the iPhoto comment from the enclosing event, folder, or album Args: photo - an IPhotoImage photo. caption_template - a format string. """ nodate_title_description = photo.caption match = re.match(_CAPTION_PATTERN_INDEX, photo.caption) if not match: match = re.match(_CAPTION_PATTERN, photo.caption) else: # Strip off trailing index nodate_title_description = '%s%s%s %s' % ( match.group(1), match.group(2), match.group(3), match.group(4)) if match: # Strip of leading date nodate_title_description = nodate_title_description[8:].strip() title_description = photo.caption if photo.comment: title_description += ': ' + photo.comment nodate_title_description += ': ' + photo.comment folder_description = container.getcommentwithouthints().strip() if photo.date: year = str(photo.date.year) month = str(photo.date.month).zfill(2) day = str(photo.date.day).zfill(2) else: year = '' month = '' day = '' names = photo.getfaces() if names: face_list = '(%s)' % (', '.join(names)) else: face_list = '' if check_faces_in_caption(photo): opt_face_list = '' else: opt_face_list = '(%s)' % (', '.join(photo.getfaces())) try: return caption_template.format( title=photo.caption, description=photo.comment, title_description=title_description, nodate_title_description=nodate_title_description, folder_description=folder_description, yyyy=year, mm=month, dd=day, face_list=face_list, opt_face_list=opt_face_list).strip() except KeyError, ex: su.pout( u'Unrecognized field in caption template: %s. Use one of: title, description, ' 'title_description, yyyy, mm, dd.' % (str(ex))) return caption_template
def __init__(self, name, albumtype, data, images, ratings,
             aperture_data=None, verbose=False):
    """Builds an album/event object from an AlbumData entry.

    Args:
        name: album name.
        albumtype: iPhoto album type name, or an Aperture numeric type.
        data: album dictionary from the AlbumData plist (may be None).
        images: maps image ids to image objects (the master image list).
        ratings: if non-empty, only images with one of these ratings are
            included.
        aperture_data: optional Aperture library data, used to pick up
            folder/album notes as comments.
        verbose: if True, print diagnostics about missing images and
            unknown album types.
    """
    self.name = name
    self._date = None
    self.uuid = None
    self.comment = None
    if data:
        if data.get("RollDateAsTimerInterval"):
            self._date = applexml.getappletime(
                data.get("RollDateAsTimerInterval"))
        if data.get("uuid"):
            self.uuid = data.get("uuid")
            # The "last import" album is treated as a special roll.
            if self.uuid == 'lastImportAlbum':
                albumtype = "Special Roll"
        if 'Comments' in data:
            self.comment = data.get("Comments")
    if aperture_data:
        # Prefer the note from the matching Aperture folder or album.
        container = None
        if self.uuid and self.uuid in aperture_data.folders:
            container = aperture_data.folders[self.uuid]
        elif self.uuid and self.uuid in aperture_data.albums:
            container = aperture_data.albums[self.uuid]
        if container and container.note:
            self.comment = container.note
    # The iPhoto master album has no album type.
    if not albumtype and data and data.has_key("Master"):
        albumtype = 'Master'
    # Convert Aperture numeric album types to iPhoto album type names.
    if albumtype in _APERTURE_ALBUM_TYPES:
        albumtype = _APERTURE_ALBUM_TYPES[albumtype]
    elif not albumtype:
        if verbose:
            su.pout(u'No album type for %s.' % name)
    elif albumtype.isdigit():
        albumid = int(albumtype)
        if albumid > 90:
            # 94 - Photos
            # 95 - Flagged
            # 96 - Library Album
            # 97 - Projects
            # 98 - Aperture
            # 99 - Aperture Library
            albumtype = name
        else:
            print 'Unknown album type %s for %s.' % (albumtype, name)
    self.albumtype = albumtype
    self.data = data
    self.albumid = -1
    self.images = []
    self.albums = []
    self.master = False
    hidden = 0  # count of referenced images missing from the image list
    if not self.isfolder() and data and (
        data.has_key("KeyList") or data.has_key("KeyListString")):
        # Image keys come either as a list or as a comma-separated string.
        keylist = data.get("KeyList") if data.has_key("KeyList") else data.get(
            "KeyListString").split(",")
        for key in keylist:
            if not key:
                continue
            image = images.get(key)
            if image:
                # Apply the ratings filter, if any.
                if ratings and not image.rating in ratings:
                    continue
                self.images.append(image)
            else:
                hidden += 1
                if verbose:
                    su.pout(u"%s: image with id %s does not exist - could be hidden." % (name, key))
    if hidden:
        su.pout(u"%s: %d images not exported (probably hidden)." %
                (name, hidden))
    self._assign_names()
def _check_need_to_export(self, source_file, options): """Returns true if the image file needs to be exported. Args: source_file: path to image file, with aliases resolved. options: processing options. """ if not os.path.exists(self.export_file): return True # In link mode, check the inode. if options.link: export_stat = os.stat(self.export_file) source_stat = os.stat(source_file) if export_stat.st_ino != source_stat.st_ino: su.pout('Changed: %s: inodes don\'t match: %d vs. %d' % (self.export_file, export_stat.st_ino, source_stat.st_ino)) return True if (not options.reverse and os.path.getmtime(self.export_file) + _MTIME_FUDGE < os.path.getmtime(source_file)): su.pout('Changed: %s: newer version is available: %s vs. %s' % (self.export_file, time.ctime(os.path.getmtime(self.export_file)), time.ctime(os.path.getmtime(source_file)))) return True if (options.reverse and os.path.getmtime(source_file) + _MTIME_FUDGE < os.path.getmtime(self.export_file)): su.pout('Changed: %s: newer version is available: %s vs. %s' % (self.export_file, time.ctime(os.path.getmtime(source_file)), time.ctime(os.path.getmtime(self.export_file)))) return True if not self.size and not options.reverse: # With creative renaming in iPhoto it is possible to get # stale files if titles get swapped between images. Double # check the size, allowing for some difference for meta data # changes made in the exported copy source_size = os.path.getsize(source_file) export_size = os.path.getsize(self.export_file) diff = abs(source_size - export_size) if diff > _MAX_FILE_DIFF or (diff > 32 and options.link): su.pout('Changed: %s: file size: %d vs. %d' % (self.export_file, export_size, source_size)) return True # In reverse mode, we don't check the file size (might have changed because # of Preview regeneration), so we look at the image dimensions instead to catch # some out-of-sync images. 
if options.reverse and su.getfileextension(self.export_file) in _EXIF_EXTENSIONS: (source_width, source_height) = imageutils.get_image_width_height(source_file) (export_width, export_height) = imageutils.get_image_width_height(self.export_file) if ((source_width and export_width and source_width != export_width) or (source_height and export_height and source_height != export_height)): su.pout('Changed: %s: dimensions: %dx%d vs. %dx%d' % ( self.export_file, source_width, source_height, export_width, export_height)) return True # In link mode, we don't need to check the modification date in the # database because we catch the changes by the size check above. #if (not options.link and # datetime.datetime.fromtimestamp(os.path.getmtime( # self.export_file)) < self.photo.mod_date): # su.pout('Changed: %s: modified in iPhoto: %s vs. %s ' % ( # self.export_file, # time.ctime(os.path.getmtime(self.export_file)), # self.photo.mod_date)) # return True return False
def run_phoshare(cmd_args): """main routine for phoshare.""" parser = get_option_parser() (options, args) = parser.parse_args(cmd_args) if len(args) != 0: parser.error("Found some unrecognized arguments on the command line.") if options.version: print '%s %s' % (phoshare.phoshare_version.PHOSHARE_VERSION, phoshare.phoshare_version.PHOSHARE_BUILD) return 1 if options.iptc > 0 and not exiftool.check_exif_tool(): print >> sys.stderr, ("Exiftool is needed for the --itpc or --iptcall" + " options.") return 1 if options.size and options.link: parser.error("Cannot use --size and --link together.") if not options.iphoto: parser.error("Need to specify the iPhoto library with the --iphoto " "option.") if options.export or options.picasaweb or options.checkalbumsize: if not (options.albums or options.events or options.smarts or options.facealbums): parser.error("Need to specify at least one event, album, or smart " "album for exporting, using the -e, -a, or -s " "options.") else: parser.error("No action specified. 
Use --export to export from your " "iPhoto library.") if options.picasaweb: if options.picasapassword: google_password = options.picasapassword else: google_password = getpass.getpass('Google password for %s: ' % options.picasaweb) if options.ratings: options.ratings = [int(r) for r in options.ratings.split(",")] if options.reverse: if not options.dryrun: su.pout(u"Turning on dryrun mode because of --reverse option.") options.dryrun = True logging_handler = logging.StreamHandler() logging_handler.setLevel(logging.DEBUG if options.verbose else logging.INFO) _logger.addHandler(logging_handler) album_xml_file = iphotodata.get_album_xmlfile( su.expand_home_folder(options.iphoto)) album_sql_file = iphotodata.get_album_sqlfile( su.expand_home_folder(options.iphoto)) data = iphotodata.get_iphoto_data(album_xml_file, album_sql_file, ratings=options.ratings, verbose=options.verbose, aperture=options.aperture) if options.originals and options.export: data.load_aperture_originals() options.aperture = data.aperture and not data.aperture_data options.foldertemplate = unicode(options.foldertemplate) options.nametemplate = unicode(options.nametemplate) options.captiontemplate = unicode(options.captiontemplate) if options.checkalbumsize: data.checkalbumsizes(int(options.checkalbumsize)) if options.export: album = ExportLibrary(su.expand_home_folder(options.export)) export_iphoto(album, data, options.exclude, options) if options.picasaweb: try: import phoshare.picasaweb as picasaweb albums = picasaweb.PicasaAlbums(options.picasaweb, google_password) export_iphoto(albums, data, options.exclude, options) except ImportError: su.perr('Sorry, this version of Phoshare does not support uploading to PicasaWeb.')
def check_iptc_data(self, export_file, options, is_original=False):
    """Tests if a file has the proper keywords and caption in the meta
    data, and updates the file if not (unless in dry-run mode).

    Args:
        export_file: path to the exported image file.
        options: processing options.
        is_original: True if export_file is an original image; face
            regions are not written into originals (could be cropped).

    Returns:
        True if the IPTC data was (or would be) updated.
    """
    # Only file types that exiftool can process are considered.
    if not su.getfileextension(export_file) in ("jpg", "tif", "tiff", "png",
                                                "nef", "cr2"):
        return False
    (file_keywords, file_caption, date_time_original, rating, gps,
     region_rectangles, region_names) = exiftool.get_iptc_data(export_file)
    if options.aperture:
        # Aperture maintains all these metadata in the preview files, and
        # does not even save all the information into the .xml file.
        new_caption = None
        new_keywords = None
        new_date = None
        new_rating = -1
        new_gps = None
    else:
        new_caption = imageutils.get_photo_caption(self.photo,
                                                   options.captiontemplate)
        if not su.equalscontent(file_caption, new_caption):
            su.pout('Updating IPTC for %s because it has Caption "%s" '
                    'instead of "%s".' % (export_file, file_caption,
                                          new_caption))
        else:
            new_caption = None  # None signals: no caption update needed
        new_keywords = self.get_export_keywords(options.face_keywords)
        if not imageutils.compare_keywords(new_keywords, file_keywords):
            # NOTE(review): new_keywords can be None here, which would
            # make ",".join(new_keywords) raise - confirm that
            # get_export_keywords never returns None when the keyword
            # lists differ.
            su.pout("Updating IPTC for %s because of keywords (%s instead "
                    "of %s)" % (export_file, ",".join(file_keywords),
                                ",".join(new_keywords)))
        else:
            new_keywords = None
        new_date = None
        if self.photo.date and date_time_original != self.photo.date:
            su.pout("Updating IPTC for %s because of date (%s instead of "
                    "%s)" % (export_file, date_time_original,
                             self.photo.date))
            new_date = self.photo.date
        new_rating = -1
        if self.photo.rating != None and rating != self.photo.rating:
            su.pout(
                "Updating IPTC for %s because of rating (%d instead of "
                "%d)" % (export_file, rating, self.photo.rating))
            new_rating = self.photo.rating
        new_gps = None
        if options.gps and self.photo.gps:
            if (not gps or not self.photo.gps.is_same(gps)):
                if gps:
                    old_gps = gps
                else:
                    old_gps = imageutils.GpsLocation()
                su.pout("Updating IPTC for %s because of GPS %s vs %s" %
                        (export_file, old_gps.to_string(),
                         self.photo.gps.to_string()))
                new_gps = self.photo.gps
    # Don't export the faces into the original file (could have been
    # cropped).
    do_faces = options.faces and not is_original
    (new_rectangles, new_persons) = self._check_person_iptc_data(export_file,
                                                                 region_rectangles,
                                                                 region_names,
                                                                 do_faces)
    if (new_caption != None or new_keywords != None or new_date or
        new_gps or new_rating != -1 or new_rectangles or new_persons):
        if not options.dryrun:
            exiftool.update_iptcdata(export_file, new_caption, new_keywords,
                                     new_date, new_rating, new_gps,
                                     new_rectangles, new_persons)
        return True
    return False
command.append('-RegionAreaX=%s' % (str(rectangle[0]))) command.append('-RegionAreaY=%s' % (str(rectangle[1]))) command.append('-RegionAreaW=%s' % (str(rectangle[2]))) command.append('-RegionAreaH=%s' % (str(rectangle[3]))) command.append('-RegionAreaUnit=normalized') elif new_rectangles != None: command.append('-RegionAreaX=') command.append("-iptc:CodedCharacterSet=ESC % G") command.append(filepath) result = su.fsdec(su.execandcombine(command)) if tmp: os.remove(tmp) if result.find("1 image files updated") != -1: if result != "1 image files updated": su.pout(result) # wipe out the back file created by exiftool backup_file = filepath + "_original" if os.path.exists(backup_file): os.remove(backup_file) return True else: su.perr("Failed to update IPTC data in image %s: %s" % ( filepath, result)) return False def _write_caption_file(new_caption, command): """If new_caption is set, write it into a tempory file, add a parameter to command, and return the file handle.""" if new_caption is None:
def check_iptc_data(self, export_file, options, is_original=False):
    """Tests if a file has the proper keywords and caption in the meta data.

    Compares the IPTC metadata in export_file with the values kept for
    self.photo, and rewrites the file metadata (unless --dryrun) when a
    difference is found.

    Returns:
        True if an update was (or would be) performed, False otherwise.
    """
    # Skip file types we don't inspect.
    extension = su.getfileextension(export_file)
    if extension not in ("jpg", "tif", "tiff", "png", "nef", "cr2"):
        return False
    (file_keywords, file_caption, date_time_original, rating, gps,
     region_rectangles, region_names) = exiftool.get_iptc_data(
         export_file)
    # "No change" markers: None everywhere, -1 for the rating.
    new_caption = None
    new_keywords = None
    new_date = None
    new_rating = -1
    new_gps = None
    if not options.aperture:
        # (Aperture maintains all these metadata in the preview files, and
        # does not even save all the information into the .xml file, so the
        # comparisons below are skipped in that mode.)
        candidate_caption = imageutils.get_photo_caption(
            self.photo, options.captiontemplate)
        if not su.equalscontent(file_caption, candidate_caption):
            su.pout('Updating IPTC for %s because it has Caption "%s" '
                    'instead of "%s".' %
                    (export_file, file_caption, candidate_caption))
            new_caption = candidate_caption
        candidate_keywords = self.get_export_keywords(options.face_keywords)
        if not imageutils.compare_keywords(candidate_keywords,
                                           file_keywords):
            su.pout("Updating IPTC for %s because of keywords (%s instead "
                    "of %s)" % (export_file, ",".join(file_keywords),
                                ",".join(candidate_keywords)))
            new_keywords = candidate_keywords
        if self.photo.date and date_time_original != self.photo.date:
            su.pout("Updating IPTC for %s because of date (%s instead of "
                    "%s)" % (export_file, date_time_original,
                             self.photo.date))
            new_date = self.photo.date
        photo_rating = self.photo.rating
        if photo_rating != None and rating != photo_rating:
            su.pout("Updating IPTC for %s because of rating (%d instead of "
                    "%d)" % (export_file, rating, photo_rating))
            new_rating = photo_rating
        if options.gps and self.photo.gps and (
                not gps or not self.photo.gps.is_same(gps)):
            old_gps = gps if gps else imageutils.GpsLocation()
            su.pout("Updating IPTC for %s because of GPS %s vs %s" %
                    (export_file, old_gps.to_string(),
                     self.photo.gps.to_string()))
            new_gps = self.photo.gps
    # Face regions are never written into the original file - it may be
    # cropped differently than the exported preview.
    do_faces = options.faces and not is_original
    (new_rectangles, new_persons) = self._check_person_iptc_data(
        export_file, region_rectangles, region_names, do_faces)
    needs_update = (new_caption != None or new_keywords != None or
                    new_date or new_gps or new_rating != -1 or
                    new_rectangles or new_persons)
    if not needs_update:
        return False
    if not options.dryrun:
        exiftool.update_iptcdata(export_file, new_caption, new_keywords,
                                 new_date, new_rating, new_gps,
                                 new_rectangles, new_persons)
    return True
def update_iptcdata(filepath, new_caption, new_keywords, new_datetime,
                    new_rating, new_gps, new_rectangles, new_persons):
    """Updates the caption and keywords of an image file.

    Builds a single exiftool invocation applying all requested changes at
    once, runs it, and removes the "_original" backup that exiftool leaves
    behind.

    Args:
        filepath: path of the image file to update.
        new_caption: caption text; '' clears the caption, None leaves it
            unchanged.
        new_keywords: list of keywords; [] clears them, None leaves them
            unchanged.
        new_datetime: datetime for DateTimeOriginal, or None for no change.
        new_rating: rating value; any negative value means no change.
        new_gps: location object with latitude/longitude, or None.
        new_rectangles: list of face-region rectangles; [] clears them,
            None leaves them unchanged.
        new_persons: list of person display names; [] clears them, None
            leaves them unchanged.

    Returns:
        True if exiftool reported a successful update, False otherwise.
    """
    # Some cameras write into ImageDescription, so we wipe it out to not cause
    # conflicts with Caption-Abstract. We also wipe out the XMP Subject and
    # Description tags (we use Keywords and Caption-Abstract).
    command = [EXIFTOOL, '-F', '-m', '-P', '-ImageDescription=', '-Subject=',
               '-Description=']
    tmp = None
    try:
        if new_caption is not None:
            if not new_caption:
                command.append('-Caption-Abstract=')
            else:
                # Pass the caption through a temporary file (exiftool's
                # "tag<=file" syntax) - presumably so quotes/newlines in the
                # caption survive intact.
                tmpfd, tmp = tempfile.mkstemp(dir="/var/tmp")
                os.close(tmpfd)
                file1 = open(tmp, "w")
                try:
                    print >> file1, new_caption.encode("utf-8")
                finally:
                    file1.close()
                command.append('-Caption-Abstract<=%s' % (tmp))
        if new_datetime:
            command.append('-DateTimeOriginal="%s"' % (
                new_datetime.strftime("%Y:%m:%d %H:%M:%S")))
        if new_keywords:
            for keyword in new_keywords:
                command.append(u'-keywords=%s' % (keyword))
        elif new_keywords != None:
            # Empty list (not None): explicitly clear all keywords.
            command.append('-keywords=')
        if new_rating >= 0:
            command.append('-Rating=%d' % (new_rating))
        if new_gps:
            # Coordinates are written unsigned; the Ref tags carry the
            # hemisphere.
            command.append('-c')
            command.append('%.6f')
            command.append('-GPSLatitude="%f"' % (abs(new_gps.latitude)))
            command.append('-GPSLatitudeRef=' + new_gps.latitude_ref())
            command.append('-GPSLongitude="%f"' % (abs(new_gps.longitude)))
            command.append('-GPSLongitudeRef=' + new_gps.longitude_ref())
        if new_persons:
            for person in new_persons:
                command.append(u'-RegionPersonDisplayName=%s' % (person))
        elif new_persons != None:
            command.append('-RegionPersonDisplayName=')
        if new_rectangles:
            for rectangle in new_rectangles:
                command.append('-RegionRectangle=%s' % (
                    ','.join(str(c) for c in rectangle)))
        elif new_rectangles != None:
            command.append('-RegionRectangle=')
        command.append("-iptc:CodedCharacterSet=ESC % G")
        command.append(filepath)
        result = su.fsdec(su.execandcombine(command))
    finally:
        # Remove the caption temp file even if running exiftool raised.
        if tmp:
            os.remove(tmp)
    if result.find("1 image files updated") != -1:
        if result != "1 image files updated":
            su.pout(result)
        # wipe out the backup file created by exiftool
        backup_file = filepath + "_original"
        if os.path.exists(backup_file):
            os.remove(backup_file)
        return True
    else:
        su.perr("Failed to update IPTC data in image %s: %s" % (
            filepath, result))
        return False
def run_phoshare(cmd_args): """main routine for phoshare.""" parser = get_option_parser() (options, args) = parser.parse_args(cmd_args) if len(args) != 0: parser.error("Found some unrecognized arguments on the command line.") if options.version: print '%s %s' % (phoshare.phoshare_version.PHOSHARE_VERSION, phoshare.phoshare_version.PHOSHARE_BUILD) return 1 if options.iptc > 0 and not exiftool.check_exif_tool(): print >> sys.stderr, ("Exiftool is needed for the --itpc or --iptcall" + " options.") return 1 if options.size and options.link: parser.error("Cannot use --size and --link together.") if not options.iphoto: parser.error("Need to specify the iPhoto library with the --iphoto " "option.") if options.export or options.picasaweb or options.checkalbumsize: if not (options.albums or options.events or options.smarts or options.facealbums): parser.error("Need to specify at least one event, album, or smart " "album for exporting, using the -e, -a, or -s " "options.") else: parser.error("No action specified. 
Use --export to export from your " "iPhoto library.") if options.picasaweb: if options.picasapassword: google_password = options.picasapassword else: google_password = getpass.getpass('Google password for %s: ' % options.picasaweb) if options.ratings: options.ratings = [int(r) for r in options.ratings.split(",")] if options.reverse: if not options.dryrun: su.pout(u"Turning on dryrun mode because of --reverse option.") options.dryrun = True logging_handler = logging.StreamHandler() logging_handler.setLevel(logging.DEBUG if options.verbose else logging.INFO) _logger.addHandler(logging_handler) album_xml_file = iphotodata.get_album_xmlfile( su.expand_home_folder(options.iphoto)) if options.omitdatabasefile: album_sql_file="" else: album_sql_file = iphotodata.get_album_sqlfile( su.expand_home_folder(options.iphoto)) data = iphotodata.get_iphoto_data(album_xml_file, album_sql_file, ratings=options.ratings, verbose=options.verbose, aperture=options.aperture) if options.originals and options.export: data.load_aperture_originals() options.aperture = data.aperture and not data.aperture_data options.foldertemplate = unicode(options.foldertemplate) options.nametemplate = unicode(options.nametemplate) options.captiontemplate = unicode(options.captiontemplate) if options.checkalbumsize: data.checkalbumsizes(int(options.checkalbumsize)) if options.export: album = ExportLibrary(su.expand_home_folder(options.export)) export_iphoto(album, data, options.exclude, options) if options.picasaweb: try: import phoshare.picasaweb as picasaweb albums = picasaweb.PicasaAlbums(options.picasaweb, google_password) export_iphoto(albums, data, options.exclude, options) except ImportError: su.perr('Sorry, this version of Phoshare does not support uploading to PicasaWeb.')
command.append('-RegionAreaX=%s' % (str(rectangle[0]))) command.append('-RegionAreaY=%s' % (str(rectangle[1]))) command.append('-RegionAreaW=%s' % (str(rectangle[2]))) command.append('-RegionAreaH=%s' % (str(rectangle[3]))) command.append('-RegionAreaUnit=normalized') elif new_rectangles != None: command.append('-RegionAreaX=') command.append("-iptc:CodedCharacterSet=ESC % G") command.append(filepath) result = su.fsdec(su.execandcombine(command)) if tmp: os.remove(tmp) if result.find("1 image files updated") != -1: if result != "1 image files updated": su.pout(result) # wipe out the back file created by exiftool backup_file = filepath + "_original" if os.path.exists(backup_file): os.remove(backup_file) return True else: su.perr("Failed to update IPTC data in image %s: %s" % (filepath, result)) return False def _write_caption_file(new_caption, command): """If new_caption is set, write it into a tempory file, add a parameter to command, and return the file handle."""
def __init__(self, name, albumtype, data, images, ratings, aperture_data=None, verbose=False): self.name = name self._date = None self.uuid = None self.comment = None if data: if data.get("RollDateAsTimerInterval"): self._date = applexml.getappletime( data.get("RollDateAsTimerInterval")) if data.get("uuid"): self.uuid = data.get("uuid") if 'Comments' in data: self.comment = data.get("Comments") if aperture_data: container = None if self.uuid and self.uuid in aperture_data.folders: container = aperture_data.folders[self.uuid] elif self.uuid and self.uuid in aperture_data.albums: container = aperture_data.albums[self.uuid] if container and container.note: self.comment = container.note # The iPhoto master album has no album type. if not albumtype and data and data.has_key("Master"): albumtype = 'Master' # Convert Aperture numeric album types to iPhoto album type names. if albumtype in _APERTURE_ALBUM_TYPES: albumtype = _APERTURE_ALBUM_TYPES[albumtype] elif not albumtype: if verbose: su.pout(u'No album type for %s.' % name) elif albumtype.isdigit(): albumid = int(albumtype) if albumid > 90: # 94 - Photos # 95 - Flagged # 96 - Library Album # 97 - Projects # 98 - Aperture # 99 - Aperture Library albumtype = name else: print 'Unknown album type %s for %s.' % (albumtype, name) self.albumtype = albumtype self.data = data self.albumid = -1 self.images = [] self.albums = [] self.master = False hidden = 0 if not self.isfolder() and data and (data.has_key("KeyList") or data.has_key("KeyListString")): keylist = data.get("KeyList") if data.has_key( "KeyList") else data.get("KeyListString").split(",") for key in keylist: if not key: continue image = images.get(key) if image: if ratings and not image.rating in ratings: continue self.images.append(image) else: hidden += 1 if verbose: su.pout( u"%s: image with id %s does not exist - could be hidden." % (name, key)) if hidden: su.pout(u"%s: %d images not exported (probably hidden)." % (name, hidden)) self._assign_names()
def update_iptcdata(filepath, new_caption, new_keywords, new_datetime, new_rating, new_gps, new_rectangles, new_persons): """Updates the caption and keywords of an image file.""" # Some cameras write into ImageDescription, so we wipe it out to not cause # conflicts with Caption-Abstract. We also wipe out the XMP Subject and # Description tags (we use Keywords and Caption-Abstract). command = [EXIFTOOL, '-F', '-m', '-P', '-ImageDescription=', '-Subject=', '-Description='] tmp = None if not new_caption is None: if not new_caption: command.append('-Caption-Abstract=') else: tmpfd, tmp = tempfile.mkstemp(dir="/var/tmp") os.close(tmpfd) file1 = open(tmp, "w") print >> file1, new_caption.encode("utf-8") file1.close() command.append('-Caption-Abstract<=%s' % (tmp)) if new_datetime: command.append('-DateTimeOriginal="%s"' % ( new_datetime.strftime("%Y:%m:%d %H:%M:%S"))) if new_keywords: for keyword in new_keywords: command.append(u'-keywords=%s' % (keyword)) elif new_keywords != None: command.append('-keywords=') if new_rating >= 0: command.append('-Rating=%d' % (new_rating)) if new_gps: command.append('-c') command.append('%.6f') command.append('-GPSLatitude="%f"' % (abs(new_gps.latitude))) command.append('-GPSLatitudeRef=' + new_gps.latitude_ref()) command.append('-GPSLongitude="%f"' % (abs(new_gps.longitude))) command.append('-GPSLongitudeRef=' + new_gps.longitude_ref()) if new_persons: for person in new_persons: command.append(u'-RegionPersonDisplayName=%s' % (person)) elif new_persons != None: command.append('-RegionPersonDisplayName=') if new_rectangles: for rectangle in new_rectangles: command.append('-RegionRectangle=%s' % ( ','.join(str(c) for c in rectangle))) elif new_rectangles != None: command.append('-RegionRectangle=') command.append("-iptc:CodedCharacterSet=ESC % G") command.append(filepath) result = su.fsdec(su.execandcombine(command)) if tmp: os.remove(tmp) if result.find("1 image files updated") != -1: if result != "1 image files updated": 
su.pout(result) # wipe out the back file created by exiftool backup_file = filepath + "_original" if os.path.exists(backup_file): os.remove(backup_file) return True else: su.perr("Failed to update IPTC data in image %s: %s" % ( filepath, result)) return False
def __init__(self, key, data, keyword_map, face_map, aperture_data):
    """Initializes the image from its library dictionary entry.

    Args:
        key: id of this image within the library.
        data: dictionary describing the image (caption, paths, rating,
            GPS, keywords, faces, ...).
        keyword_map: map from keyword id to keyword name.
        face_map: map from face key to person name.
        aperture_data: optional Aperture library data; when present, its
            version record fills in missing date/rating/GPS/keywords and
            decides whether to export the master image or a full-size
            preview.
    """
    self.id = key
    self.data = data
    self._caption = su.nn_string(data.get("Caption")).strip()
    self.comment = su.nn_string(data.get("Comment")).strip()
    version = None
    if aperture_data:
        version = aperture_data.versions.get(key)
    # Image date: prefer the explicit timestamp, then the Aperture version,
    # then a date parsed out of the caption.
    if data.has_key("DateAsTimerInterval"):
        self.date = applexml.getappletime(data.get("DateAsTimerInterval"))
    elif version:
        self.date = version.image_date
    else:
        # Try to get the date from the caption in "YYYYMMDD ..." format.
        m = re.match(_CAPTION_PATTERN, self._caption)
        if m:
            year = int(m.group(1))
            month = int(m.group(2))
            # A month or day of 00 in the caption maps to 1 so that a valid
            # datetime can still be built.
            if not month:
                month = 1
            date = int(m.group(3))
            if not date:
                date = 1
            self.date = datetime.datetime(year, month, date)
        else:
            self.date = None
    self.mod_date = applexml.getappletime(
        data.get("ModDateAsTimerInterval"))
    self.image_path = data.get("ImagePath")
    if data.has_key("Rating"):
        self.rating = int(data.get("Rating"))
    elif version:
        self.rating = version.mainRating
    else:
        self.rating = None
    if data.get("longitude"):
        latitude = float(data.get("latitude"))
        longitude = float(data.get("longitude"))
        self.gps = imageutils.GpsLocation(latitude, longitude)
    elif version:
        self.gps = version.location
    else:
        self.gps = None
    self.keywords = []
    keyword_list = data.get("Keywords")
    if keyword_list is not None:
        for i in keyword_list:
            self.keywords.append(keyword_map.get(i))
    elif version:
        self.keywords = version.keywords
    if version:
        self.originalpath = None  # This is just a placeholder...
        # Use the preview if there are adjustments (rotation, edits, or a
        # non-JPEG master); otherwise export the master image directly.
        if (version.rotation or version.hasAdjustments or
                not su.getfileextension(version.master_image_path) in
                _JPG_EXTENSIONS):
            #if version.rotation:
            #    su.pout(u"Rotated: %s (%d)" % (self._caption,
            #                                   version.rotation))
            #if version.hasAdjustments:
            #    su.pout(u"Adjustments: %s" % (self._caption))
            #if not su.getfileextension(version.master_image_path) in \
            #        _JPG_EXTENSIONS:
            #    su.pout(u"Not JPEG: %s" % (self._caption))
            self.originalpath = version.master_image_path
            if not version.imageProxy.fullSizePreviewPath:
                su.pout(u"No preview path for %s." % (self.caption))
            else:
                self.image_path = version.imageProxy.fullSizePreviewPath
        else:
            self.image_path = version.master_image_path
            self.originalpath = None
            if not version.imageProxy.fullSizePreviewUpToDate:
                su.pout(u"%s: full size preview not up to date." %
                        (self.caption))
    else:
        self.originalpath = data.get("OriginalPath")
    self.roll = data.get("Roll")
    self.albums = []  # list of albums that this image belongs to
    self.faces = []
    self.face_rectangles = []
    self.event_name = ''  # name of event (roll) that this image belongs to
    self.event_index = ''  # index within event
    self.event_index0 = ''  # index with event, left padded with 0
    face_list = data.get("Faces")
    if face_list:
        for face_entry in face_list:
            face_key = face_entry.get("face key")
            face_name = face_map.get(face_key)
            if face_name:
                self.faces.append(face_name)
                # Rectangle is '{{x, y}, {width, height}}' as ratios,
                # referencing the lower left corner of the face rectangle,
                # with lower left corner of image as (0,0)
                rectangle = parse_face_rectangle(
                    face_entry.get("rectangle"))
                # Convert to using center of area, relative to upper left
                # corner of image
                rectangle[0] += rectangle[2] / 2.0
                rectangle[1] = max(
                    0.0, 1.0 - rectangle[1] - rectangle[3] / 2.0)
                self.face_rectangles.append(rectangle)
            # Other keys in face_entry: face index
    # Now sort the faces left to right by the x coordinate of their
    # rectangle centers; ties are broken by nudging x by a tiny epsilon so
    # the dict keys stay unique.
    sorted_names = {}
    sorted_rectangles = {}
    for i in xrange(len(self.faces)):
        x = self.face_rectangles[i][0]
        while sorted_names.has_key(x):
            x += 0.00001
        sorted_names[x] = self.faces[i]
        sorted_rectangles[x] = self.face_rectangles[i]
    self.faces = [sorted_names[x] for x in sorted(sorted_names.keys())]
    self.face_rectangles = [
        sorted_rectangles[x] for x in sorted(sorted_rectangles.keys())]
def get_photo_caption(photo, container, caption_template): """Gets the caption for a IPhotoImage photo, using a template. Supports: {caption} - the iPhoto caption (title). {description} - the iPhoto comment. {dated_caption_description} - the caption and comments from an IPhotoImage combined into a single string, nicely formatted like YYYY/MM/DD title: description. {folder_description} - the iPhoto comment from the enclosing event, folder, or album Args: photo - an IPhotoImage photo. caption_template - a format string. """ nodate_title_description = photo.caption match = re.match(_CAPTION_PATTERN_INDEX, photo.caption) if not match: match = re.match(_CAPTION_PATTERN, photo.caption) else: # Strip off trailing index nodate_title_description = '%s%s%s %s' % ( match.group(1), match.group(2), match.group(3), match.group(4)) if match: # Strip of leading date nodate_title_description = nodate_title_description[8:].strip() title_description = photo.caption if photo.comment: title_description += ': ' + photo.comment nodate_title_description += ': ' + photo.comment folder_description = container.getcommentwithouthints().strip() if photo.date: year = str(photo.date.year) month = str(photo.date.month).zfill(2) day = str(photo.date.day).zfill(2) else: year = '' month = '' day = '' names = photo.getfaces() if names: face_list = '(%s)' % (', '.join(names)) else: face_list = '' if check_faces_in_caption(photo): opt_face_list = '' else: opt_face_list = '(%s)' % (', '.join(photo.getfaces())) try: return caption_template.format( title=photo.caption, description=photo.comment, title_description=title_description, nodate_title_description=nodate_title_description, folder_description=folder_description, yyyy=year, mm=month, dd=day, face_list=face_list, opt_face_list=opt_face_list).strip() except KeyError, ex: su.pout(u'Unrecognized field in caption template: %s. Use one of: title, description, ' 'title_description, yyyy, mm, dd.' % (str(ex))) return caption_template