Example #1
def rotate(subname: str = "",
           folder: str = r"",
           sign=1,
           override=True,
           ask=True):
    """
    rotate back according to tag information (Rotate 90 CW or Rotate 270 CW)
    Some programs like franzis hdr projects rotate the resolution of the picture -> picture gets upward resolution and
    shown as rotated two times. This function reverses the resolution rotation according to exif info.
    Pictures that either have no rotation according to exif or have a normal resolution ratio are not modified.
    So calling it a second time wont change anything.
    :param subname: only files that contain this name are rotated, empty string: no restriction
    :param sign: direction of rotation
    :param folder: only files in directories that match this regex are rotated, empty string: no restriction
    :param override: override file with rotated one
    :param ask: if should ask for user confirmation
    """
    log_function_call(rotate.__name__, subname, folder, sign, override, ask)
    from PIL import Image

    NFiles = 0
    clock = Clock()
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder): continue
        if len(filenames) == 0: continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=ask)
        if has_not_keys(Tagdict, keys=["Orientation"]): continue
        leng = len(list(Tagdict.values())[0])
        for i in range(leng):
            # build the model for the i-th file from the tag dictionary
            model = create_model(Tagdict, i)
            if subname not in model.filename: continue
            if model.is_rotated_by(0) or not model.is_upward():
                continue
            name = model.get_path()
            log().info("rotate %s", model.filename)
            img = Image.open(name)
            if model.is_rotated_by(90):
                img_rot = img.rotate(90 * sign, expand=True)
            elif model.is_rotated_by(-90):
                img_rot = img.rotate(-90 * sign, expand=True)
            else:
                continue
            NFiles += 1
            if not override:
                name = name[:name.rfind(".")] + "_ROTATED" + name[name.
                                                                  rfind("."):]
            img_rot.save(name, 'JPEG', quality=99, exif=img.info['exif'])
    clock.finish()
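
A minimal usage sketch (not part of the source): rotate() operates on the current working directory, so one would chdir into the photo collection first. The directory, subname and folder values are hypothetical.

import os

os.chdir(r"D:\photos\2020_vacation")  # hypothetical photo directory
# undo the resolution rotation for HDR results in folders matching "2020",
# writing "*_ROTATED" copies instead of overriding the originals
rotate(subname="HDR", folder=r"2020", sign=1, override=False, ask=False)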
Example #2
def write_exiftags(tagDict: dict, inpath: str = "", options: List[str] = None):
    if not options:
        options = []
    log_function_call_debug(write_exiftags.__name__, tagDict, inpath, options)
    if not inpath:
        inpath = os.getcwd()
    clock = Clock()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if not settings.includeSubdirs and dirpath != inpath: break
        n = count_files(filenames, settings.image_types + settings.video_types)
        if n == 0:
            log().info("  No matching files in %s",
                       os.path.relpath(dirpath, inpath))
            continue
        all_options = options + tag_dict_to_options(tagDict)
        call_exiftool(dirpath, "*", all_options, True)
        log().info("%4d tags written in   %s", n,
                   os.path.relpath(dirpath, inpath))
    clock.finish()
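
A minimal usage sketch (not from the source): the tag names follow exiftool's naming and are hypothetical examples; "-overwrite_original" is a standard exiftool option that is passed through unchanged.

# write the same artist and copyright tags to every matching file below the path
write_exiftags({"Artist": "Jane Doe", "Copyright": "2020 Jane Doe"},
               inpath=r"D:\photos\2020_vacation",
               options=["-overwrite_original"])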
Example #3
def read_exiftags(inpath="",
                  file_types: List[str] = settings.image_types,
                  skipdirs: Optional[List[str]] = None,
                  ask=True,
                  ignore_model=False) -> Dict[str, list]:
    if not skipdirs:
        skipdirs = []
    if not inpath:
        inpath = os.getcwd()
    file_types = _get_distinct_filestypes(file_types)
    number_of_files = count_files_in(inpath, file_types, skipdirs)
    if number_of_files == 0:
        log().debug("no %s Files in %s, settings.includeSubdirs: %r",
                    file_types, inpath, settings.includeSubdirs)
        return {}
    log().info("process %d %s Files in %s, settings.includeSubdirs: %r",
               number_of_files, file_types, inpath, settings.includeSubdirs)
    if ask: askToContinue()

    clock = Clock()
    ListOfDicts = []
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, skipdirs): continue
        if count_files(filenames, file_types) == 0:
            log().info("  No matching files in %s",
                       os.path.relpath(dirpath, inpath))
            continue
        for filetype in file_types:
            if count_files(filenames, [filetype]) == 0:
                continue
            out, err = call_exiftool(dirpath, "*" + filetype, [], False)
            out = out[out.find("ExifTool Version Number"):]
            out_split = out.split("========")
            log().info("%4d tags of %s files extracted in %s", len(out_split),
                       filetype, os.path.relpath(dirpath, inpath))
            for tags in out_split:
                ListOfDicts.append(decode_exiftags(tags))

    outdict = listsOfDicts_to_dictOfLists(ListOfDicts)
    if not outdict: return {}
    outdict = sort_dict_by_date_and_model(outdict, ignore_model)
    clock.finish()
    return outdict
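
A minimal usage sketch (not from the source) of the returned dict-of-lists: every key is an exif tag name and index i addresses the same file across all lists. The path is hypothetical; the two tag names appear in the other examples.

tag_dict = read_exiftags(r"D:\photos\2020_vacation", ask=False)
if tag_dict:
    for name, date in zip(tag_dict["File Name"], tag_dict["Date/Time Original"]):
        print(name, date)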
Example #4
def first_date_per_folder() -> OrderedDict:
    """
    find files with missing exif data
    """
    log_function_call(first_date_per_folder.__name__)

    clock = Clock()
    inpath = os.getcwd()
    folder_dict = OrderedDict()

    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath): continue
        if fileop.count_files(filenames, settings.image_types) == 0: continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=False)
        if len(list(Tagdict.values())) == 0: continue
        folder_dict[os.path.relpath(dirpath, inpath)] = [
            date for date in Tagdict["Date/Time Original"] if date
        ][0]

    clock.finish()
    return folder_dict
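
A minimal usage sketch (not from the source): the function walks the current working directory, so one would chdir into the photo collection root first (the path is hypothetical).

import os

os.chdir(r"D:\photos")  # hypothetical photo collection root
for folder, first_date in first_date_per_folder().items():
    print(folder, first_date)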
Example #5
def name_to_exif(folder=r"", additional_tags=(), startdir=None):
    """
    extract title, description and mode from name and write them to exif

    deprecated: try to use write_exif_using_csv() instead
    """
    inpath = os.getcwd()
    clock = Clock()
    file_types = settings.image_types + settings.video_types
    log().info("process %d Files in %s, subdir: %r", count_files_in(inpath, file_types, ""), inpath,
               settings.includeSubdirs)
    askToContinue()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder): continue
        filenames = filterFiles(filenames, file_types)
        for filename in filenames:
            meta_data = FileMetaData(dirpath, filename)
            if startdir:
                meta_data.import_fullname(startdir)
            else:
                meta_data.import_filename()
            meta_data.update({'tags': additional_tags})
            write_exiftag(meta_data.to_tag_dict(), dirpath, filename)
    clock.finish()
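
A minimal usage sketch (not from the source); the function is deprecated in favour of write_exif_using_csv(), and the directory, regex and tag values are hypothetical.

import os

os.chdir(r"D:\photos\2020_vacation")  # hypothetical photo directory
name_to_exif(folder=r"2020", additional_tags=("vacation", "family"))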
Example #6
def find_bad_exif(do_move=True,
                  check_date_additional=False,
                  folder: str = r""):
    """
    find files with missing exif data
    """
    log_function_call(find_bad_exif.__name__, do_move, check_date_additional, folder)

    clock = Clock()
    inpath = os.getcwd()
    lines_no_tags = OrderedSet()
    lines_bad_date_additional = OrderedSet()
    lines_date_missing = OrderedSet()
    out_filename_no_tags = get_info_dir("no_tags.csv")
    file_no_tags, writer_no_tags = fileop.create_csv_writer(
        out_filename_no_tags, ["directory", "name_part"])
    out_filename_bad_date_additional = get_info_dir("bad_date_additional.csv")
    file_bad_date_additional, writer_bad_date_additional = fileop.create_csv_writer(
        out_filename_bad_date_additional, ["directory", "name_part"])
    out_filename_date_missing = get_info_dir("date_missing.csv")
    file_date_missing, writer_date_missing = fileop.create_csv_writer(
        out_filename_date_missing, ["directory", "name_part"])
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder): continue
        if fileop.count_files(filenames, settings.image_types) == 0: continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=False)
        if len(list(Tagdict.values())) == 0: continue
        leng = len(list(Tagdict.values())[0])
        for i in range(leng):
            if (not "Keywords" in Tagdict or not Tagdict["Keywords"][i]) or \
                    (not "Subject" in Tagdict or not Tagdict["Subject"][i]) or \
                    (not "Description" in Tagdict or not Tagdict["Description"][i]) or \
                    (not "User Comment" in Tagdict or not Tagdict["User Comment"][i]):
                lines_no_tags.add((os.path.basename(dirpath),
                                   _remove_counter(Tagdict["File Name"][i])))
                if do_move and not "bad_exif" in dirpath:
                    move(
                        Tagdict["File Name"][i], dirpath,
                        dirpath.replace(
                            inpath, os.path.join(inpath, "bad_exif_keywords")))
            if not "Date/Time Original" in Tagdict or not Tagdict[
                    "Date/Time Original"][i]:
                lines_date_missing.add(
                    (os.path.basename(dirpath),
                     _remove_counter(Tagdict["File Name"][i])))
                if do_move and not "bad_exif" in dirpath:
                    move(
                        Tagdict["File Name"][i], dirpath,
                        dirpath.replace(
                            inpath,
                            os.path.join(inpath, "bad_exif_date_missing")))
            if check_date_additional and \
                    (("Date Created" in Tagdict and Tagdict["Date Created"][i]) or
                     ("Time Created" in Tagdict and Tagdict["Time Created"][i]) or
                     ("Create Date" in Tagdict and Tagdict["Create Date"][i]) or
                     ("Modify Date" in Tagdict and Tagdict["Modify Date"][i]) or
                     ("Digital Creation Date" in Tagdict and Tagdict["Digital Creation Date"][i])):
                lines_bad_date_additional.add(
                    (os.path.basename(dirpath),
                     _remove_counter(Tagdict["File Name"][i])))
                if do_move and not "bad_exif" in dirpath:
                    move(
                        Tagdict["File Name"][i], dirpath,
                        dirpath.replace(
                            inpath,
                            os.path.join(inpath, "bad_exif_date_additional")))
    writer_no_tags.writerows(lines_no_tags)
    writer_bad_date_additional.writerows(lines_bad_date_additional)
    writer_date_missing.writerows(lines_date_missing)
    file_no_tags.close()
    file_bad_date_additional.close()
    file_date_missing.close()
    clock.finish()
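
A minimal usage sketch (not from the source): a report-only run that fills the csv files without moving anything; the directory and regex are hypothetical.

import os

os.chdir(r"D:\photos")  # hypothetical photo collection root
find_bad_exif(do_move=False, check_date_additional=True, folder=r"2020")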
Example #7
def write_exif_using_csv(csv_filenames: Union[str, List[str]] = "*", folder: str = r"", start_folder: str = "",
                         csv_folder: str = None, csv_restriction: str = "", import_filename: bool = True,
                         import_exif: bool = True,
                         only_when_changed: bool = False, overwrite_gps: bool = False, is_video: bool = False):
    """
    csv files are used for setting tags
    the csv files have to be semicolon-separated
    empty values in a column and columns that are not present are not evaluated
    each '' in the following is a possible column name

    can also be used without any csv files at all, just to import the filename into tags

    the following restrictions on files are possible:
        'directory': checks if value is part of directory
        'name_main': checks if value is the first part of filename
        'first': int counter min
        'last': int counter max
        'name_part': checks if value is part of filename

    :param csv_filenames:
        can be either "*" for all files in directory or a iterable of filenames

        can set the following exif information: ['title', 'tags', 'tags2', 'tags3', 'rating', 'description', 'gps']
            tags are expected to be separated by ', '
            rating is expected to be in interval [0,5]
            gps is expected to be 'lat, long' in decimal notation
        can set Location via ['Country', 'State', 'City', 'Location']

        sets also structured description for image processing like HDR and Panorama
        columns starting with
            'HDR' are evaluated as HDR description
            'TM' are evaluated as HDR Tone Mapping description
            'PANO' are evaluated as Panorama description
    :param csv_folder: location of the csv files - default is .EXIFnaming/info
    :param csv_restriction: files that do not pass any of the restrictions in this file are not modified at all
    :param folder: process only folders matching this regex
    :param start_folder: directories before this name will be ignored, does not need to be a full directory name
    :param import_filename: whether to extract tags from filename
    :param import_exif: whether to extract tags from exif
    :param overwrite_gps: modifier for import_exif, overwrites gps data with the information from the csv
    :param only_when_changed: when true, the filename is not imported into tags for files without matching entries in the csv
        has no effect if csv_restriction is set
    :param is_video: whether video types should be written - video types might not handle tags correctly
    """
    if not csv_folder:
        csv_folder = get_setexif_dir()

    log_function_call(write_exif_using_csv.__name__, csv_filenames, folder, start_folder, csv_folder, csv_restriction,
                      import_filename, import_exif, only_when_changed, overwrite_gps, is_video)
    inpath = os.getcwd()
    clock = Clock()
    csv.register_dialect('semicolon', delimiter=';', lineterminator='\r\n')

    if csv_filenames == "*":
        csv_filenames = filterFiles(os.listdir(csv_folder), [".csv"])
    elif csv_filenames:
        csv_filenames = [csv_filename + ".csv" for csv_filename in csv_filenames]
    csv_filenames = [os.path.join(csv_folder, csv_filename) for csv_filename in csv_filenames]
    if csv_restriction:
        csv_restriction = os.path.join(csv_folder, csv_restriction) + ".csv"

    filetypes = settings.video_types if is_video else settings.image_types

    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder, start=start_folder): continue
        for filename in filterFiles(filenames, filetypes):
            meta_data = FileMetaData(dirpath, filename)
            if not _passes_restrictor(meta_data, csv_restriction): continue
            if import_filename: meta_data.import_filename()
            if import_exif: meta_data.import_exif(overwrite_gps)

            for csv_filename in csv_filenames:
                with open(csv_filename, "r", encoding="utf-8") as csvfile:
                    reader = csv.DictReader(csvfile, dialect='semicolon')
                    for row in reader:
                        meta_data.update(row)

            if not only_when_changed or meta_data.has_changed:
                write_exiftag(meta_data.to_tag_dict(), meta_data.directory, meta_data.filename)

    clock.finish()
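
A minimal usage sketch (not from the source): a semicolon-separated csv as described in the docstring plus the call that applies it. File name, column values and the folder regex are hypothetical; the column names are taken from the docstring above.

# hypothetical tags.csv inside the csv_folder (semicolon separated):
#
#   directory;name_main;title;tags;rating;gps
#   2020_vacation;IMG_01;Sunset at the beach;beach, sunset;5;48.137, 11.575
#
# apply it to all images in folders matching "2020":
write_exif_using_csv(csv_filenames=["tags"], folder=r"2020",
                     import_filename=True, import_exif=True)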