    def upload_single(self, filepath, test=False):

        self.modified = False

        google_cache = GoogleLibrary.cache()
        google_image_filenames = google_cache.get('image_filenames')

        filename = os.path.basename(filepath)
        if filename in google_image_filenames:
            logging.info(f"Image already uploaded: '{filename}'")
            return
        elif test:
            logging.info(f"Image NEEDS uploading: '{filename}'")
            return

        image_spec_list = [{'filepath': filepath, 'filename': filename}]
        creds = GoogleService.credentials()

        try:
            if not test:
                self.upload_image_spec_list(image_spec_list, creds)

        finally:
            if self.modified:
                GoogleLibrary.save_library()
    def upload_image_spec_list(self, image_spec_list, creds):

        # Get captions for each image
        self.get_image_spec_list_captions(image_spec_list)

        # Chunk the spec list upload to max size
        media_item_count = len(image_spec_list)
        chunk_size = 50
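        # (the Google Photos mediaItems.batchCreate call accepts at most 50 new media items per request)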
        chunk_index = 0
        while chunk_index < media_item_count:
            logging.info(
                f"Batch uploading image bytes, chunk index '{chunk_index}'")

            # Get the list into chunks
            chunk_image_spec_list = image_spec_list[chunk_index:(chunk_index +
                                                                 chunk_size)]

            # Build new media item list for batch upload
            newMediaItems = []
            for image_spec in chunk_image_spec_list:

                # Upload the image bytes to the Google server
                self.upload_image_spec(image_spec, creds)

                # Build the new media item entry for the batchCreate request
                newMediaItem = {
                    'description': image_spec.get('caption'),
                    'simpleMediaItem': {
                        'uploadToken': image_spec.get('upload_token'),
                        'fileName': image_spec.get('filename'),
                    }
                }
                newMediaItems.append(newMediaItem)

            # Batch upload media items now
            request_body = {'newMediaItems': newMediaItems}

            logging.info(
                f"Batch uploading images for chunk index '{chunk_index}'")

            service = GoogleService.service()
            upload_response = service.mediaItems().batchCreate(
                body=request_body).execute()

            # Save the newly created images in local cache
            google_cache = GoogleLibrary.cache()
            google_image_ids = google_cache['image_ids']
            google_image_filenames = google_cache['image_filenames']

            if upload_response is not None:
                newMediaItemResults = upload_response.get(
                    'newMediaItemResults')
                for newMediaItemResult in newMediaItemResults:
                    status = newMediaItemResult.get('status')
                    message = status.get('message') if status else None
                    if 'Success' == message:
                        mediaItem = newMediaItemResult.get('mediaItem')
                        GoogleLibrary.cache_image(mediaItem, google_image_ids,
                                                  google_image_filenames)
                        self.modified = True

            # Advance to the next chunk; without this the while loop never terminates
            chunk_index += chunk_size
Example #3
    def upload(self, root, test=False):
        self.modified = False

        try:
            self.upload_recursive(root, test)

        finally:
            if self.modified:
                GoogleLibrary.save_library()
    def upload(self, folder, recursive=True, test=False):

        self.modified = False

        try:
            self.upload_recursive(folder, recursive, test)

        finally:
            if self.modified:
                GoogleLibrary.save_library()
    def upload_image_spec(self, image_spec, creds):

        filepath = image_spec.get('filepath')
        filename = image_spec.get('filename')

        # If image already in the local cache then ignore it
        google_cache = GoogleLibrary.cache()
        google_image_filenames = google_cache.get('image_filenames')
        if filename in google_image_filenames:
            logging.info(f"Image already uploaded: '{filename}'")
            return

        # Upload image bytes
        response = self.upload_image_bytes(filepath, filename, creds)
        if response is None or response.status_code != 200:
            status_code = response.status_code if response is not None else None
            logging.error(
                f"Unable to upload bytes for image '{filepath}', response_code: '{status_code}'"
            )
            return

        # Save the upload_token returned in the response body
        image_spec['upload_token'] = response.content.decode('utf-8')

        # Mark as modified so that the caller saves the Google cache later
        self.modified = True
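    # NOTE: 'upload_image_bytes' is not shown in these examples.  Below is a
    # minimal, hypothetical sketch of what it could look like, assuming the
    # standard Google Photos raw-upload endpoint and the 'requests' library;
    # the project's real helper may differ.
    def upload_image_bytes(self, filepath, filename, creds):

        # Imported locally so the sketch stays self-contained
        import requests

        upload_url = 'https://photoslibrary.googleapis.com/v1/uploads'
        headers = {
            'Authorization': f"Bearer {creds.token}",
            'Content-type': 'application/octet-stream',
            'X-Goog-Upload-File-Name': filename,
            'X-Goog-Upload-Protocol': 'raw'
        }

        # POST the raw image bytes; the response body is the upload token
        # that mediaItems.batchCreate consumes later
        with open(filepath, 'rb') as image_file:
            return requests.post(upload_url, data=image_file.read(), headers=headers)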
    def upload_recursive(self, folder, recursive=True, test=False):

        logging.info(f"uploading images in folder: ({folder})")

        # Get only media types in the folder
        # image spec list holds a list of objects, sample below:
        # [
        #     {
        #         'filepath': ...,
        #         'filename': ...,
        #         'upload_token': ...
        #     },
        #     {
        #         .... next image object ....
        #     }
        # ]

        image_spec_list = []
        folder_items = os.listdir(folder)
        filenames = [
            f for f in folder_items
            if os.path.isfile(os.path.join(folder, f)) and self.is_media(f)
        ]

        google_cache = GoogleLibrary.cache()
        google_image_filenames = google_cache.get('image_filenames')

        for filename in filenames:

            # If image already in cache then ignore
            if filename in google_image_filenames:
                logging.info(f"Image already uploaded: '{filename}'")
                continue
            elif test:
                logging.info(f"Image needs upload: '{filename}'")
                continue

            # Build the spec for each file
            filepath = os.path.join(folder, filename)
            image_spec = {'filepath': filepath, 'filename': filename}
            image_spec_list.append(image_spec)

        if len(image_spec_list) <= 0:
            logging.info(f"NO IMAGES TO UPLOAD IN FOLDER '{folder}'")
        else:
            creds = GoogleService.credentials()
            self.upload_image_spec_list(image_spec_list, creds)

        # Traverse sub-folders if recursive is specified
        if not recursive:
            return
        dirnames = [
            d for d in folder_items if os.path.isdir(os.path.join(folder, d))
        ]
        for dirname in dirnames:
            self.upload_recursive(os.path.join(folder, dirname), recursive,
                                  test)
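    # NOTE: 'is_media' is not shown in these examples.  A minimal sketch of
    # what it might do, assuming a simple extension check; the real helper may
    # accept more media types.
    def is_media(self, filename):
        media_extensions = ('.jpg', '.jpeg', '.png', '.gif', '.mp4', '.mov')
        return filename.lower().endswith(media_extensions)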
Example #7
    def create_shareable_album(self, service, album_name, test):

        # Create google album
        request_body = {'album': {'title': album_name}}

        if test:
            logging.info(f"Test Album Create: {album_name}")
            return None

        album_create_response = service.albums().create(
            body=request_body).execute()
        google_album_id = album_create_response.get('id')
        logging.info(f"Album Created: {album_create_response}")

        # Make Google album sharable
        request_body = {
            'sharedAlbumOptions': {
                'isCollaborative': True,
                'isCommentable': True
            }
        }
        album_share_response = service.albums().share(
            albumId=google_album_id, body=request_body).execute()

        # Now get the album from Google to see if it has been created as shareable
        album_get_response = service.albums().get(
            albumId=google_album_id).execute()

        # We will now add it to our local cache and save the cache
        google_cache = GoogleLibrary.cache()
        google_album_ids = google_cache.get('album_ids')
        google_album_titles = google_cache.get('album_titles')

        if album_get_response is not None:
            GoogleLibrary.cache_album(album_get_response,
                                      google_album_ids,
                                      google_album_titles,
                                      shared=True)

        return album_get_response
    def map_recursive(self, root, test):
        """
        High-level algorithm:
        1. For each local folder locate the Google album in cache
        2. If Google album does not exist then call 'gphotocli album upload <...path_to_album...>'
            - Add local images to Google Album from the Local album if missing
            - Remove images from Google album that are not in Local album
        """
        # Argument validation
        if not os.path.exists(root):
            logging.error(f"Folder does not exist: ({root})")
            return

        # Remove trailing slash
        slash_char = root[len(root) - 1]
        if slash_char == '/' or slash_char == '\\':
            root = root[:len(root)-1]

        # Get Google API service
        service = GoogleService.service()

        # Initialize Google API and load cache.
        google_cache = GoogleLibrary.cache()
        google_album_ids = google_cache.get('album_ids')
        google_album_titles = google_cache.get('album_titles')

        # Load local library cache
        local_cache = LocalLibrary.cache('jpg')
        local_albums = local_cache.get('albums')

        # Traverse all the sub folders in the cache
        for local_album in local_albums:

            local_album_name = local_album['name']
            local_album_path = local_album['path']

            if not local_album_path.lower().startswith(root.lower()):
                continue

            # If album is not in the Google cache then log an error and skip it
            google_album_id = google_album_titles.get(local_album_name)
            google_album = google_album_ids[google_album_id] if google_album_id is not None else None

            if google_album is None:
                logging.error(f"Ignoring album not in Google Cache: '{local_album_name}'")
                continue

            # Do mapping for each Local/Google album
            self.map_album(local_album, google_album, test)
Example #9
    def upload_recursive(self, root, test):

        # Argument validation
        if not os.path.exists(root):
            logging.error(f"Folder does not exist: ({root})")
            return

        # Remove trailing slash
        slash_char = root[len(root) - 1]
        if slash_char == '/' or slash_char == '\\':
            root = root[:len(root) - 1]

        # Get Google API service
        service = GoogleService.service()

        # Initialize Google API and load cache.
        google_cache = GoogleLibrary.cache()
        google_album_ids = google_cache.get('album_ids')
        google_album_titles = google_cache.get('album_titles')

        # Traverse all the sub folders in the cache
        local_cache = LocalLibrary.cache('jpg')
        local_albums = local_cache.get('albums')

        for local_album in local_albums:

            local_album_name = local_album['name']
            local_album_path = local_album['path']

            if not local_album_path.lower().startswith(root.lower()):
                continue

            # Check if album already in Google Cache
            google_album_id = google_album_titles.get(local_album_name)
            google_album = google_album_ids[
                google_album_id] if google_album_id is not None else None

            if google_album is not None:
                logging.info(
                    f"Album already uploaded: '{google_album.get('title')}'")
                continue

            # Do the actual creating of Google album
            album_response = self.create_shareable_album(
                service=service, album_name=local_album_name, test=test)
            if album_response:
                self.modified = True
    def map_album(self, local_album, google_album, test):

        logging.error(f"Mapping album: '{google_album.get('title')}'")

        # Initialize Google API and load cache.
        google_cache = GoogleLibrary.cache()
        google_album_ids = google_cache.get('album_ids')
        google_album_titles = google_cache.get('album_titles')
        google_image_ids = google_cache.get('image_ids')
        google_album_to_images = google_cache.get('album_images')

        # Load local library cache
        local_cache = LocalLibrary.cache('jpg')
        local_albums = local_cache.get('albums')
        local_images = local_cache.get('images')
        # local_image_ids = local_cache.get('image_ids')

        # Collect local images belonging to local album
        local_album_image_idxs = local_album.get('images')
        if local_album_image_idxs is None or len(local_album_image_idxs) <= 0:
            logging.info(f"No images found in album '{local_album.get('name')}'")
            return

        # from local album images indices, build local album image list
        local_album_images = {}
        for idx in local_album_image_idxs:
            local_image = local_images[idx]
            local_image_name = local_image.get('name')
            local_album_images[local_image_name] = local_image

        # From google album get images already in it
        google_album_id = google_album.get('id')
        google_album_to_image_ids = google_album_to_images.get(google_album_id)
        google_album_images = {}
        if google_album_to_image_ids:
            for google_image_id in google_album_to_image_ids:
                google_album_image = google_image_ids.get(google_image_id)
                google_album_images[google_image_id] = google_album_image
Example #11
def main():
    """
    Given an image pattern, find the image, determine whether it has a
    parent album, and also find other related images.
    Arguments:
        <patterns>: List of Image pattern
    """
    if len(sys.argv) < 2:
        logging.error("Too few arguments.  See help")
        return

    # Get arguments
    patterns = sys.argv[1:]

    # Load cache
    GoogleLibrary.load_library()
    cache = GoogleLibrary.cache()
    google_album_ids = cache['album_ids']
    google_album_titles = cache['album_titles']
    google_image_ids = cache['image_ids']
    google_image_filenames = cache['image_filenames']
    google_album_images = cache['album_images']
    google_image_albums = cache['image_albums']

    # Define the result
    #   result = {
    #       'seed_image': {         # first image that matched the pattern
    #           'id': ...,
    #           'filename': ...,
    #           'creationTime': ...
    #       },
    #       'albums': [
    #           {
    #               'id': ...,
    #               'title': ...,
    #               'images': [
    #                   ...image filenames, sorted...
    #               ]
    #           },
    #           ...more albums...
    #       ]
    #   }

    result_albums = []
    result = {'albums': result_albums}

    # Find images with the given patterns
    for google_image_id, google_image in google_image_ids.items():
        google_image_filename = google_image['filename']
        result_pattern = [p for p in patterns if p in google_image_filename]
        if result_pattern:

            # Add image to the result
            result['seed_image'] = {
                'id': google_image_id,
                'filename': google_image.get('filename'),
                'creationTime': google_image.get('mediaMetadata').get('creationTime')
            }

            # Get list of parent albums
            result_image_albums = google_image_albums.get(google_image_id)
            if result_image_albums is not None and len(
                    result_image_albums) > 0:

                # For each album id in image parent list add it to the result
                for result_image_album_id in result_image_albums:
                    google_album = google_album_ids.get(result_image_album_id)

                    image_list = []
                    result_album = {
                        'id': result_image_album_id,
                        'title': google_album.get('title'),
                        'images': image_list
                    }

                    result_albums.append(result_album)

                    # Get the list of all images for this album and add to the result
                    result_album_image_ids = google_album_images.get(
                        result_image_album_id)
                    for result_album_image_id in result_album_image_ids:
                        image = google_image_ids.get(result_album_image_id)
                        image_list.append(image.get('filename'))
                    image_list.sort()

            break

    util.pprint(result)
def main():
    """
    Given a folder tree root like p:\\pics\\2014 loop through
    each album and find its images in Google photos.
    If the images do not have albums then they can be deleted.
    If the images have an album, the album has more images than the local
        album images, and the album is not shared, then the images can be deleted.
    """
    if len(sys.argv) < 2:
        logging.error("Too few arguments.  Specify folder pattern")
        return

    # Get arguments
    arg_album_year = sys.argv[1]
    arg_album_pattern = f"\\{arg_album_year}\\"

    LocalLibrary.load_library('jpg')
    local_cache = LocalLibrary.cache_jpg()
    local_albums = local_cache.get('albums')
    local_album_paths = local_cache.get('album_paths')
    local_images = local_cache.get('images')

    GoogleLibrary.load_library()
    google_cache = GoogleLibrary.cache()
    google_album_ids = google_cache['album_ids']
    google_album_titles = google_cache['album_titles']
    google_image_ids = google_cache['image_ids']
    google_image_filenames = google_cache['image_filenames']
    google_album_images = google_cache['album_images']
    google_image_albums = google_cache['image_albums']

    result = []

    # Loop through each local folder under the root tree
    for local_album in local_albums:
        local_album_path = local_album.get('path')

        # filter out the ones that are not under the tree
        if local_album_path.find(arg_album_pattern) == -1:
            continue
        # if not local_album_path.startswith(arg_album_pattern):
        #     continue

        # Add this album to the list
        result_album = {'path': local_album.get('path')}
        result.append(result_album)

        # Get first jpeg image of the local album
        first_local_image = None
        local_album_image_idxs = local_album.get('images')
        for local_album_image_idx in local_album_image_idxs:

            local_image = local_images[local_album_image_idx]
            if local_image.get('mime') == 'image/jpeg':
                first_local_image = local_image
                break

        if first_local_image is None:
            result_album[
                'ERROR'] = f"No jpeg images in local album '{local_album.get('path')}'"
            continue

        result_album['first_image'] = first_local_image['name']

        # Locate this image in Google photos.  Identify the pattern
        # If the image is of the form
        #       YYYYMMDD_hhmmss_nn_AAAA_D800.jpeg
        # or just the actual name
        # First look for the images with actual name, if not found then
        # Look by date time in the filename
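        # (a hypothetical sketch of 'find_google_image' appears after this function)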

        first_google_image_id, pattern_found = find_google_image(
            first_local_image, google_image_ids, google_image_filenames)

        if first_google_image_id is None:
            result_album[
                'WARNING'] = f"First album image not in Google {first_local_image.get('name')}"
            continue

        first_google_image = google_image_ids.get(first_google_image_id)
        result_album['first_google_image'] = {
            'id': first_google_image.get('id'),
            'filename': first_google_image.get('filename'),
            'mimeType': first_google_image.get('mimeType'),
            'productUrl': first_google_image.get('productUrl')
        }

        # If the first image is part of a google album then
        # we need to know whether the image is part of a shared album
        google_image_album_list = google_image_albums.get(
            first_google_image_id)
        if google_image_album_list is None or len(
                google_image_album_list) <= 0:
            result_album['NO-GOOGLE-ALBUM'] = True
        else:
            result_image_albums = []
            result_album['HAS-ALBUMS'] = result_image_albums
            for google_image_album_id in google_image_album_list:
                google_album = google_album_ids.get(google_image_album_id)
                result_image_albums.append({
                    'id': google_album.get('id'),
                    'title': google_album.get('title'),
                    'productUrl': google_album.get('productUrl'),
                    'shared': google_album.get('shared')
                })

    gphoto.save_to_file(result,
                        f"can_google_images_be_deleted_{arg_album_year}.json")
Example #13
    def __init__(self):
        LocalLibrary.load_library('jpg')
        GoogleLibrary.load_library()
        self.modified = False
Example #14
def main():
    """
    Specify the year in which to narrow the search of images belonging to an album
    """
    if len(sys.argv) < 2:
        logging.error("Too few arguments.  See help")
        return

    # Get arguments
    arg_year = sys.argv[1]

    GoogleLibrary.load_library()
    google_cache = GoogleLibrary.cache()
    google_album_ids = google_cache['album_ids']
    google_album_titles = google_cache['album_titles']
    google_image_ids = google_cache['image_ids']
    google_image_filenames = google_cache['image_filenames']
    google_album_images = google_cache['album_images']
    google_image_albums = google_cache['image_albums']

    result_no_dateshot = []
    result_albums = {}
    result = {'no-dateshot': result_no_dateshot, 'albums': result_albums}

    # Get dateshot.  If not there then report it
    for google_image_id, google_image in google_image_ids.items():
        dateShot = None
        image_metadata = google_image.get('mediaMetadata')
        if image_metadata:
            dateShot = image_metadata.get('creationTime')
        if dateShot is None:
            result_no_dateshot.append({
                'id': google_image_id,
                'productUrl': google_image.get('productUrl')
            })
            continue

        # We have a dateshot.  Parse it and get the year.
        # If the year does not match then ignore the image
        image_year = dateShot.split('-')[0]
        if arg_year != image_year:
            continue

        # Get its google album and add it to the result
        google_image_album_object = google_image_albums.get(google_image_id)
        if google_image_album_object is None or len(
                google_image_album_object) <= 0:
            continue

        # This image has albums.  Add the albums to the results
        # and add the image to the albums
        for google_album_id in google_image_album_object:

            result_album = result_albums.get(google_album_id)
            result_album_images = None
            if result_album is None:
                google_album = google_album_ids.get(google_album_id)
                result_album_images = []
                result_album = {
                    'id': google_album_id,
                    'title': google_album.get('title'),
                    'productUrl': google_album.get('productUrl'),
                    'shared': google_album.get('shared'),
                    'images': result_album_images
                }
                result_albums[google_album_id] = result_album
            else:
                result_album_images = result_album.get('images')

            result_album_images.append(
                (google_image_id, google_image.get('productUrl')))
            # result_album_images.append({
            #     'id': google_image_id,
            #     'productUrl': google_image.get('productUrl')
            # })

    gphoto.save_to_file(
        result, f"google_images_belonging_to_album_in_year_{arg_year}.json")
Example #15
def main():
    """
    Collect all the images with the date shot of the given year.
    Check if they follow the YYYYMMDD_HHMMSS_.... format
    If all follow this format then images of the whole year can be deleted
        in one shot.
    Otherwise list out the odd image months from the date shot as culprits.
    For each of these images see in the local folder album what to do.
    """
    if len(sys.argv) < 2:
        logging.error("Too few arguments.  Specify date shot year")
        return

    # Get arguments
    args_year = sys.argv[1]

    LocalLibrary.load_library('jpg')
    local_cache = LocalLibrary.cache_jpg()
    local_albums = local_cache.get('albums')
    local_album_paths = local_cache.get('album_paths')
    local_images = local_cache.get('images')


    GoogleLibrary.load_library()
    google_cache = GoogleLibrary.cache()
    google_album_ids = google_cache['album_ids']
    google_album_titles = google_cache['album_titles']
    google_image_ids = google_cache['image_ids']
    google_image_filenames = google_cache['image_filenames']
    google_album_images = google_cache['album_images']
    google_image_albums = google_cache['image_albums']

    google_images_with_missing_dateshot = []
    google_images_missing_locally = []
    local_images_with_non_standandard_filename = []
    google_images_with_non_standandard_filename = []
    google_images_with_arg_year = []
    google_images_by_datetime = {}
    local_images_by_datetime = {}

    result = {
        'google_images_with_missing_dateshot': google_images_with_missing_dateshot,
        'google_images_missing_locally': google_images_missing_locally,
        'local_images_with_non_standandard_filename': local_images_with_non_standandard_filename,
        'google_images_with_non_standandard_filename': google_images_with_non_standandard_filename,
        'google_images_with_arg_year': google_images_with_arg_year,
        'google_images_by_datetime': google_images_by_datetime,
        'local_images_by_datetime': local_images_by_datetime
    }

    # First collect all google images in the given year
    for google_image_id, google_image in google_image_ids.items():
        mediaMetadata = google_image.get('mediaMetadata')
        if mediaMetadata is None:
            google_images_with_missing_dateshot.append(google_image)

        else:
            creationTime = mediaMetadata.get('creationTime')
            if creationTime is None:
                google_images_with_missing_dateshot.append(google_image)

            else:
                # Date shot is of the format "2021-02-15T20:29:52Z"
                # Extract the year from it
                image_year = creationTime.split('-')[0]
                if image_year == args_year:
                    google_images_with_arg_year.append(google_image)

    # If a google image filename does not have the format YYYYMMDD_HHMMSS_...
    # then there is an issue
    for google_image in google_images_with_arg_year:
        filename = google_image.get('filename')
        splits = filename.split('_')
        if len(splits) < 3:
            google_images_with_non_standandard_filename.append(google_image)
        else:
            image_date = splits[0]
            image_time = splits[1]
            if len(image_date) < 8 or not image_date.isdecimal():
                google_images_with_non_standandard_filename.append(google_image)
            elif len(image_time) < 6 or not image_time.isdecimal():
                google_images_with_non_standandard_filename.append(google_image)
            else:
                image_datetime = image_date + '_' + image_time
                google_images_by_datetime[image_datetime] = {
                    'filename': google_image.get('filename'),
                    'productUrl': google_image.get('productUrl')
                }

    # Now make a list of all the local images in the year specified
    # and add them to local_images_by_datetime.
    pattern = f"\\{args_year}\\"
    for local_album_idx, local_album in enumerate(local_albums):
        album_path = local_album.get('path')
        if pattern not in album_path:
            continue

        album_image_idxs = local_album.get('images')
        for album_image_idx in album_image_idxs:
            local_image = local_images[album_image_idx]
            local_image_name = local_image.get('name')
            splits = local_image_name.split('_')
            if len(splits) < 3:
                local_images_with_non_standandard_filename.append(local_image.get('path'))
                continue

            image_date = splits[0]
            image_time = splits[1]
            if len(image_date) < 8 or not image_date.isdecimal():
                local_images_with_non_standandard_filename.append(local_image.get('path'))
            elif len(image_time) < 6 or not image_time.isdecimal():
                local_images_with_non_standandard_filename.append(local_image.get('path'))
            else:
                image_datetime = image_date + '_' + image_time
                local_images_by_datetime[image_datetime] = {
                    'filename': local_image.get('name'),
                    'path': local_image.get('path')
                }


    # Now traverse through all the google images with date shot
    # and locate them in local images
    # If not found then error
    for datetime, google_image in google_images_by_datetime.items():
        local_image = local_images_by_datetime.get(datetime)
        if local_image is None:
            google_images_missing_locally.append(google_image)

    bn = os.path.basename(args_year)
    gphoto.save_to_file(result, f"can_google_images_be_deleted_by_year_{bn}.json")
def main():
    gphoto.init()
    GoogleLibrary.download_library()
Example #17
def main():
    gphoto.init()

    # Load Google Library
    GoogleLibrary.load_library()
    google_cache = GoogleLibrary.cache()
    google_album_ids = google_cache['album_ids']
    google_album_titles = google_cache['album_titles']

    google_image_ids = google_cache['image_ids']
    google_image_filenames = google_cache['image_filenames']

    google_album_images = google_cache['album_images']
    google_image_albums = google_cache['image_albums']

    # Load Local picsHres jpg Library
    LocalLibrary.load_library('jpg')
    local_cache = LocalLibrary.cache_jpg()
    local_albums = local_cache.get('albums')
    local_album_paths = local_cache.get('album_paths')
    local_album_names = local_cache.get('album_names')
    local_images = local_cache.get('images')
    local_image_ids = local_cache.get('image_ids')
    local_image_names = local_cache.get('image_names')

    # Initialize the result
    missing_images_with_album_reason = "MISSING_IMAGES_WITH_ALBUM"
    missing_images_with_no_album_reason = "MISSING_IMAGES_WITH_NO_ALBUM"
    image_exist_locally_reason = "IMAGE_EXIST_LOCALLY"

    result_missing_images_with_album = {}
    result_missing_images_with_no_album = []
    result_image_exist_locally = []
    result = {
        missing_images_with_album_reason: result_missing_images_with_album,
        missing_images_with_no_album_reason: result_missing_images_with_no_album,
        image_exist_locally_reason: result_image_exist_locally
    }

    # Walk through each Google image whose filename begins with PFILMmmm_nnn.jpg
    for google_image_id in google_image_ids:
        google_image = google_image_ids[google_image_id]

        # Ignore images whose filename is missing or does not begin with "PFILM"
        image_name = google_image.get('filename')
        if image_name is None or not image_name.startswith("PFILM"):
            continue

        # Check for image exist locally
        local_image_idx = local_image_names.get(image_name)
        if local_image_idx is not None:
            local_image = local_images[local_image_idx]
            result_image_exist_locally.append(local_image.get('path'))
            continue

        # We now know that the image is missing locally.
        # Now figure out whether this image has a parent album
        google_albums_of_this_image = google_image_albums.get(google_image_id)
        if google_albums_of_this_image is not None:

            # Image does have parent albums.
            # Add its first album to the result if not already done
            google_album_idx = None
            for idx in google_albums_of_this_image:
                google_album_idx = idx
                break

            google_album = google_album_ids[google_album_idx]
            google_album_id = google_album.get('id')
            result_album = result_missing_images_with_album.get(google_album_id)

            # If album not in result then add the album
            missing_images_with_album = None
            if result_album is None:
                missing_images_with_album = []
                result_album = {
                    'id': google_album_id,
                    'title': google_album.get('title'),
                    'images': missing_images_with_album
                }
                result_missing_images_with_album[google_album_id] = result_album
            else:
                missing_images_with_album = result_album.get('images')

            # Add missing image to parent album result
            missing_images_with_album.append({
                'id': google_image_id,
                'filename': image_name,
                'productUrl': google_image['productUrl']
            })

        # Google image is missing locally and has no parent album
        else:
            result_missing_images_with_no_album.append({
                    'id': google_image_id,
                    'filename': image_name,
                    'productUrl': google_image['productUrl']
                })

    # Save to cache file also
    gphoto.save_to_file(result, "can_PFILMs_be_deleted.json")
Example #18
def main():

    if len(sys.argv) < 2:
        logging.error("Too few arguments.  Specify year")
        return

    # Get arguments
    arg_year = sys.argv[1]

    local_models = {}
    google_models = {}
    result = {
        'local_models': local_models,
        'google_models': google_models
    }

    GoogleLibrary.load_library()
    google_cache = GoogleLibrary.cache()
    google_image_ids = google_cache['image_ids']

    # Loop through google images
    for google_image_id, google_image in google_image_ids.items():

        mediaMetadata = google_image.get('mediaMetadata')
        if mediaMetadata is None:
            continue

        creationTime = mediaMetadata.get('creationTime')
        if creationTime is None:
            continue

        image_year = creationTime.split('-')[0]
        if image_year != arg_year:
            continue

        photo = mediaMetadata.get('photo')
        if photo is None:
            continue

        cameraMake = photo.get('cameraMake')
        cameraModel = photo.get('cameraModel')

        if cameraMake is None and cameraModel is None:
            continue

        makemodel = cameraMake if cameraMake is not None else ""
        makemodel += '_'
        makemodel += cameraModel if cameraModel is not None else ""

        google_models[makemodel] = None

    # Scan local library metadata
    LocalLibraryMetadata.load_library_metadata('raw')
    library_metadata_cache = LocalLibraryMetadata.cache('raw')

    for image_path, image_metadata in library_metadata_cache.items():

        dateTimeOriginal = image_metadata.get('DateTimeOriginal')
        if dateTimeOriginal is None:
            continue

        image_year = dateTimeOriginal.split(':')[0]
        if image_year != arg_year:
            continue

        model = image_metadata.get('Model')
        if model is None:
            continue

        local_models[model] = None

    # Save results
    bn = os.path.basename(arg_year)
    gphoto.save_to_file(result, f"camera_models_in_year_{bn}.json")