Example #1
def folders_to_main(series: bool = False,
                    primary: bool = False,
                    blurry: bool = False,
                    dirs: list = None,
                    one_level: bool = True,
                    not_inpath: bool = True):
    """
    reverses the filtering/sorting into directories
    :param series: restrict to reversing filter_series
    :param primary: restrict to reversing filter_primary
    :param blurry: restrict to reversing detect_blurry
    :param dirs: restrict to reversing the given directory names
    :param one_level: move files only one directory level up
    :param not_inpath: leave all directories directly in inpath as they are, only change subdirectories
    """
    log_function_call(folders_to_main.__name__, series, primary, blurry, dirs,
                      one_level, not_inpath)
    inpath = os.getcwd()
    reverseDirs = []
    if series:
        reverseDirs += ["B" + str(i) for i in range(1, 8)] + ["S", "single"]
    if primary: reverseDirs += ["B", "S", "TL", "SM", "primary"]
    if blurry: reverseDirs += ["blurry"]
    if dirs: reverseDirs += list(dirs)

    deepest = 0
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if not_inpath and dirpath == inpath: continue
        if is_invalid_path(dirpath, whitelist=reverseDirs): continue
        depth = get_relpath_depth(dirpath, inpath)
        deepest = max(deepest, depth)
    if not_inpath:
        deepest -= 1
    if not reverseDirs and deepest > 1:
        log().warning(
            "A folder structure with a depth of %2d will be flattened",
            deepest)
        askToContinue()
    elif deepest > 3:
        log().warning("The folder structure has a depth of %2d", deepest)
        log().info("chosen directory names: %r", reverseDirs)
        askToContinue()

    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if not_inpath and dirpath == inpath: continue
        if is_invalid_path(dirpath, whitelist=reverseDirs): continue
        if one_level:
            destination = os.path.dirname(dirpath)
        else:
            destination = inpath
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames),
                   len(filenames))
        for filename in filenames:
            if not file_has_ext(filename,
                                settings.image_types + settings.video_types):
                continue
            move(filename, dirpath, destination)
        removeIfEmtpy(dirpath)
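A minimal usage sketch, assuming the functions above are importable and that the photo collection is the current working directory (the path is a placeholder):

import os

os.chdir("/path/to/photos")  # placeholder: root of the photo collection
# undo the sorting done by filter_series and move the files only one directory level up
folders_to_main(series=True, one_level=True)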
Example #2
def detect_similar(similarity=0.9):
    """
    put similar pictures into the same subfolder
    :param similarity: -1: completely different, 1: identical
    """
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath): continue
        print(dirpath, len(dirnames), len(filenames))
        dircounter = 0
        filenamesA = [
            filename for filename in filenames
            if file_has_ext(filename, ('.JPG', ".jpg"))
        ]
        for i, filenameA in enumerate(filenamesA):
            print(filenameA)
            notSimCounter = 0
            for filenameB in filenamesA[i + 1:]:
                if notSimCounter == 10: break  # give up on filenameA after 10 consecutive non-similar pictures
                if not isfile(dirpath, filenameA): continue
                if not isfile(dirpath, filenameB): continue
                if not are_similar(dirpath, filenameA, dirpath, filenameB,
                                   similarity):
                    notSimCounter += 1
                    continue
                notSimCounter = 0
                moveToSubpath(filenameB, dirpath, "%03d" % dircounter)
            if not os.path.isdir(os.path.join(dirpath, "%03d" % dircounter)):
                continue
            moveToSubpath(filenameA, dirpath, "%03d" % dircounter)
            dircounter += 1
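A usage sketch, assuming are_similar treats the value as a lower similarity threshold (per the docstring above: -1 completely different, 1 identical); the directory is a placeholder:

import os

os.chdir("/path/to/photos")  # placeholder
detect_similar(similarity=0.8)  # group similar pictures into numbered subfolders (000, 001, ...)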
Example #3
def filter_series():
    """
    put each kind of series in its own directory
    """
    log_function_call(filter_series.__name__)
    inpath = os.getcwd()
    skipdirs = ["B" + str(i) for i in range(1, 8)]
    skipdirs += [
        "S", "SM", "TL", "mp4", "HDR", "single", "PANO", "others", "TLM"
    ]
    # TLM: Timelapse manual - pictures taken on different days to be combined into a timelapse
    skipdirs += [model for model in c.CameraModelShort.values() if model]

    log().info(inpath)
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, skipdirs): continue
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames),
                   len(filenames))
        filenames = moveBracketSeries(dirpath, filenames)
        filenames = moveSeries(dirpath, filenames, "S")
        filenames = moveSeries(dirpath, filenames, "SM")
        filenames = moveSeries(dirpath, filenames, "TL")
        filenames = move_media(dirpath, filenames, settings.video_types, "mp4")
        # move the process types to separate folders - attention: the ordering of the statements matters
        filenames = move_media(dirpath, filenames, ["PANO"], "PANO")
        filenames = move_media(dirpath, filenames, ["ANIMA"], "ANIMA")
        filenames = move_media(dirpath, filenames, ["RET"], "RET")
        filenames = move_media(dirpath, filenames, ["ZOOM"], "ZOOM")
        filenames = move_media(dirpath, filenames, ["SMALL"], "SMALL")
        filenames = move_media(dirpath, filenames, ["CUT"], "CUT")
        filenames = move_media(dirpath, filenames, ["HDR"], "HDR")
        move_media(dirpath, filenames, settings.image_types, "single")
Example #4
def sanitize_filename(folder=r"",
                      posttags_to_end: List[str] = None,
                      onlyprint=False):
    """
    sanitize the order of Scene and Process tags
    sanitize the counter to be split by $
    sanitize sub-process names added by an external program so that they are concatenated to the main process name (only Hugin)
    :param folder: optional regex to restrict processing to matching folders
    :param posttags_to_end: optional list of special posttags to be resorted to the end
    :param onlyprint: if true, renames are only printed to the log and no files are renamed; useful for testing
    :return:
    """
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder): continue
        for filename in (filenames + dirnames):
            filename = filename.replace("panorama", "PANO")
            filenameAccessor = FilenameAccessor(filename)
            _sanitize_posttags(filenameAccessor, posttags_to_end)
            _sanitize_process_counter(filenameAccessor)
            _sanitize_pano(filenameAccessor)
            filename_new = filenameAccessor.sorted_filename()
            if filename != filename_new:
                log().info("rename: %s to %s", filename, filename_new)
                if not onlyprint:
                    renameInPlace(dirpath, filename, filename_new)
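A dry-run sketch using onlyprint; the folder regex and the posttag value are placeholders:

# log the renames that would be performed without touching any file
sanitize_filename(folder=r"HDR\w*", posttags_to_end=["SMALL"], onlyprint=True)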
Example #5
def copy_subdirectories(dest: str, dir_names: List[str]):
    """
    copy the files of subfolders with the specified names to dest without preserving the directory structure
    :param dest: copy destination
    :param dir_names: directory names to copy
    """
    inpath = os.getcwd()
    log().info(inpath)
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, whitelist=dir_names): continue
        copyFilesTo(filenames, dest, False)
Example #6
def detect_blurry():
    """
    detect blurry images and put them into a subdirectory named "blurry"
    """
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath): continue
        print(dirpath, len(dirnames), len(filenames))
        for filename in filenames:
            if not file_has_ext(filename, ('.JPG', ".jpg")): continue
            if not is_blurry(dirpath, filename, 30): continue
            moveToSubpath(filename, dirpath, "blurry")
Example #7
def rotate(subname: str = "",
           folder: str = r"",
           sign=1,
           override=True,
           ask=True):
    """
    rotate back according to the tag information (Rotate 90 CW or Rotate 270 CW)
    Some programs like Franzis HDR projects rotate the resolution of the picture -> the picture gets an upward
    resolution and is shown as rotated twice. This function reverses the resolution rotation according to the exif
    info. Pictures that either have no rotation according to exif or have a normal resolution ratio are not modified,
    so calling it a second time won't change anything.
    :param subname: only files that contain this name are rotated, empty string: no restriction
    :param folder: only files in directories that match this regex are rotated, empty string: no restriction
    :param sign: direction of rotation
    :param override: override the file with the rotated one
    :param ask: whether to ask for user confirmation
    """
    log_function_call(rotate.__name__, subname, folder, sign, override, ask)
    from PIL import Image

    NFiles = 0
    clock = Clock()
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder): continue
        if len(filenames) == 0: continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=ask)
        if has_not_keys(Tagdict, keys=["Orientation"]): continue
        leng = len(list(Tagdict.values())[0])
        for i in range(leng):
            # build the model holding the tag information of this file
            model = create_model(Tagdict, i)
            if subname not in model.filename: continue
            if model.is_rotated_by(0) or not model.is_upward():
                continue
            name = model.get_path()
            log().info("rotate %s", model.filename)
            img = Image.open(name)
            if model.is_rotated_by(90):
                img_rot = img.rotate(90 * sign, expand=True)
            elif model.is_rotated_by(-90):
                img_rot = img.rotate(-90 * sign, expand=True)
            else:
                continue
            NFiles += 1
            if not override:
                name = name[:name.rfind(".")] + "_ROTATED" + name[name.rfind("."):]
            img_rot.save(name, 'JPEG', quality=99, exif=img.info['exif'])
    clock.finish()
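A usage sketch: restrict the rotation to files whose name contains a given subname and keep the originals (the subname and folder values are placeholders):

# write rotated copies as *_ROTATED files instead of overriding the originals
rotate(subname="HDR", folder=r"HDR\w*", override=False, ask=False)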
Example #8
def copy_exif_via_mainname(origin: str, target: str, overwriteDateTime: bool = False,
                           file_types: Iterable = settings.image_types):
    """
    copy exif information from files in directory origin to target
    files are matched via main name -> processed files can get exif information of original files
    :param overwriteDateTime: whether to overwrite an already existing "Date/Time Original"
    :param origin: where exif infos should be read
    :param target: where exif infos should be written to
    :param file_types: of target files, default: all image types
    """
    log_function_call(copy_exif_via_mainname.__name__, origin, target)
    inpath = os.getcwd()
    target_dict = {}
    exclusion_tags = ["--PreviewImage", "--ThumbnailImage", "--Rating"]
    command = "-TagsFromFile"
    for (dirpath, dirnames, filenames) in os.walk(os.path.join(inpath, target)):
        if is_invalid_path(dirpath): continue
        filenames = filterFiles(filenames, file_types)
        for filename in filenames:
            if not overwriteDateTime:
                tagDict = read_exiftag(dirpath, filename)
                if hasDateTime(tagDict): continue
            main = FilenameAccessor(filename).mainname()
            target_dict.setdefault(main, []).append(os.path.join(dirpath, filename))
    for (dirpath, dirnames, filenames) in os.walk(os.path.join(inpath, origin)):
        if is_invalid_path(dirpath): continue
        filenames = filterFiles(filenames, settings.image_types)
        for filename in filenames:
            main = FilenameAccessor(filename).mainname()
            if main not in target_dict: continue
            if filename in [os.path.basename(target_file) for target_file in target_dict[main]]: continue
            origin_file = os.path.join(dirpath, filename)
            for target_file in target_dict[main]:
                commands = [command, origin_file, target_file]
                call_exiftool_direct(exclusion_tags + commands)
            del target_dict[main]
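A usage sketch; "original" and "HDR" are placeholder directory names below the current working directory:

# copy exif data from the files in "original" to the processed files in "HDR",
# keeping any "Date/Time Original" that is already present in the targets
copy_exif_via_mainname(origin="original", target="HDR", overwriteDateTime=False)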
Example #9
def move_each_pretag_to_folder():
    """
    move each file into subfolders named after its pretag and primary tags
    """
    log_function_call(move_each_pretag_to_folder.__name__)
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath): continue
        for filename in filenames:
            filenameAccessor = FilenameAccessor(filename)
            if filenameAccessor.pre not in dirpath:
                move(filename, dirpath,
                     os.path.join(dirpath, filenameAccessor.pre))
            if len(filenameAccessor.primtags) > 0 and filenameAccessor.primtags[0] not in dirpath:
                move(filename, dirpath,
                     os.path.join(dirpath, *filenameAccessor.primtags))
Example #10
def fake_date(start='2000:01:01', write=True, folder_dict: OrderedDict = None):
    """
    each file in a directory is one second later
    each directory is one day later
    :param start: the date on which to start generating fake dates
    :param write: whether to write the tags or only print them
    :param folder_dict: mapping of folder name (relative to the current directory) to a date string in the same format as start
    """
    log_function_call(fake_date.__name__, start)
    inpath = os.getcwd()
    start_time_part = ' 06:00:00.000'
    start_time = giveDatetime(start + start_time_part)
    dir_counter = -1
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath): continue
        filenames = filterFiles(filenames, settings.image_types + settings.video_types)
        if not filenames: continue
        if folder_dict:
            dirpath_rel = os.path.relpath(dirpath, inpath)
            if dirpath_rel not in folder_dict:
                continue
            dirtime = giveDatetime(folder_dict[dirpath_rel] + start_time_part)
        else:
            dir_counter += 1
            dirtime = start_time + dt.timedelta(days=dir_counter)
        log().info(dirtime)
        seconds = 0
        minutes = 0
        lastname = ""
        for filename in filenames:
            if len(filename) == len(lastname) and remove_ext(filename)[:-2] == remove_ext(lastname)[:-2]:
                seconds += 1
            else:
                seconds = 0
                minutes += 1
            lastname = filename
            time = dirtime + dt.timedelta(minutes=minutes, seconds=seconds)

            time_string = dateformating(time, "YYYY:MM:DD HH:mm:ss")
            if write:
                # CreateDate is sometimes set and google fotos gives it precedence over DateTimeOriginal
                write_exiftag({"DateTimeOriginal": time_string}, dirpath, filename,
                              ["-DateCreated=", "-TimeCreated=", "-CreateDate=", "-Artist=", "-DigitalCreationDate=",
                               "-ModifyDate=", "-DateTimeDigitized="])
Example #11
def read_exiftags(inpath="",
                  file_types: List[str] = settings.image_types,
                  skipdirs: List[str] = None,
                  ask=True,
                  ignore_model=False) -> Dict[str, list]:
    """
    read the exif tags of all files of the given file types below inpath using exiftool
    :return: dict mapping each tag name to a list of values, one entry per file
    """
    if not skipdirs:
    if not inpath:
        inpath = os.getcwd()
    file_types = _get_distinct_filestypes(file_types)
    number_of_files = count_files_in(inpath, file_types, skipdirs)
    if number_of_files == 0:
        log().debug("no %s Files in %s, settings.includeSubdirs: %r",
                    file_types, inpath, settings.includeSubdirs)
        return {}
    log().info("process %d %s Files in %s, settings.includeSubdirs: %r",
               number_of_files, file_types, inpath, settings.includeSubdirs)
    if ask: askToContinue()

    clock = Clock()
    ListOfDicts = []
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, skipdirs): continue
        if count_files(filenames, file_types) == 0:
            log().info("  No matching files in %s",
                       os.path.relpath(dirpath, inpath))
            continue
        for filetype in file_types:
            if count_files(filenames, [filetype]) == 0:
                continue
            out, err = call_exiftool(dirpath, "*" + filetype, [], False)
            out = out[out.find("ExifTool Version Number"):]
            out_split = out.split("========")
            log().info("%4d tags of %s files extracted in %s", len(out_split),
                       filetype, os.path.relpath(dirpath, inpath))
            for tags in out_split:
                ListOfDicts.append(decode_exiftags(tags))

    outdict = listsOfDicts_to_dictOfLists(ListOfDicts)
    if not outdict: return {}
    outdict = sort_dict_by_date_and_model(outdict, ignore_model)
    clock.finish()
    return outdict
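The return value maps exiftool tag names to lists with one entry per file (see Examples #14 and #20 for how it is consumed); a small usage sketch with two tags the other examples rely on:

tags = read_exiftags(file_types=[".jpg"], ask=False)
for name, date in zip(tags.get("File Name", []), tags.get("Date/Time Original", [])):
    print(name, date)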
Example #12
def create_rating_csv(rating: int = 4, subdir: str = ""):
    """
    creates a csv file listing all image files in the directory
    the rating column is filled with the given rating
    :param rating: rating to be written
    :param subdir: subdirectory to create the rating file for; if empty, all directories are taken
    """
    log_function_call(create_rating_csv.__name__, rating, subdir)
    inpath = os.getcwd()
    out_filebasename = "rating"
    if subdir: out_filebasename += "_" + subdir
    out_filename = get_setexif_dir(out_filebasename + ".csv")
    rating_file, writer = fileop.create_csv_writer(out_filename,
                                                   ["name_part", "rating"])
    for (dirpath, dirnames, filenames) in os.walk(os.path.join(inpath,
                                                               subdir)):
        if is_invalid_path(dirpath): continue
        for filename in filterFiles(filenames, settings.image_types):
            writer.writerow([filename, rating])
    rating_file.close()
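A usage sketch; the generated csv has the columns name_part and rating and can later be consumed by write_exif_using_csv (Example #19). The subdirectory name is a placeholder:

# write rating 5 for every image below the subdirectory "best"
create_rating_csv(rating=5, subdir="best")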
Example #13
def resize(size=(128, 128)):
    """
    resize images to an icon-like size
    :param size: size of the resulting images
    """
    log_function_call(resize.__name__, size)

    inpath = os.getcwd()
    dest = os.path.join(inpath, "SMALL")
    os.mkdir(dest)
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, blacklist=["SMALL"]): continue
        for filename in filenames:
            if not file_has_ext(filename, ('.JPG', ".jpg")): continue
            # Load the original image:
            accessor = FilenameAccessor(filename)
            img = Image.open(os.path.join(dirpath, filename))
            img.thumbnail(size, Image.LANCZOS)  # Image.ANTIALIAS was removed in newer Pillow versions; LANCZOS is the equivalent
            accessor.processes.append("SMALL")
            outfile = os.path.join(dest, accessor.sorted_filename())
            img.save(outfile, 'JPEG', quality=90)
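A usage sketch:

# create 256x256 thumbnails of all JPGs in the "SMALL" subdirectory of the current directory
resize(size=(256, 256))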
Example #14
def first_date_per_folder() -> OrderedDict:
    """
    collect the first "Date/Time Original" per folder
    :return: mapping of relative folder path to that date
    """
    log_function_call(first_date_per_folder.__name__)

    clock = Clock()
    inpath = os.getcwd()
    folder_dict = OrderedDict()

    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath): continue
        if fileop.count_files(filenames, settings.image_types) == 0: continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=False)
        if len(list(Tagdict.values())) == 0: continue
        folder_dict[os.path.relpath(dirpath, inpath)] = [
            date for date in Tagdict["Date/Time Original"] if date
        ][0]

    clock.finish()
    return folder_dict
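A usage sketch printing the collected dates per folder:

folder_dates = first_date_per_folder()
for folder, date in folder_dates.items():
    print(folder, date)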
Example #15
def create_names_csv_per_dir(start_after_dir=''):
    """
    extract names from the file path
    write a csv file with those names, one row per directory and main name

    This csv can be modified to be used with :func:`write_exif_using_csv`.
    If you want to modify it with Excel or Calc, take care to import all columns of the csv as text.
    :param start_after_dir: only path parts after this directory name are used as tags
    """
    log_function_call(create_names_csv_per_dir.__name__)
    inpath = os.getcwd()
    tag_set_names = OrderedSet()
    out_filename = get_info_dir("tags_names.csv")
    csvfile, writer = fileop.create_csv_writer(
        out_filename, ["directory", "name_main", "tags"])
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath): continue
        filenameAccessors = [
            FilenameAccessor(filename)
            for filename in filterFiles(filenames, image_types)
        ]
        if len(filenameAccessors) == 0: continue
        tags = []
        found = False
        for part in dirpath.split(os.sep):
            if found:
                tags += part.split(', ')
            else:
                found = part == start_after_dir
        filenameAccessorLast = filenameAccessors[0]
        tag_set_names.add(
            (", ".join(tags), filenameAccessorLast.pre,
             ', '.join(OrderedSet(tags + [filenameAccessorLast.pre]))))
        for filenameAccessor in filenameAccessors[1:]:
            if filenameAccessor.pre != filenameAccessorLast.pre:
                tag_set_names.add(
                    (", ".join(tags), filenameAccessor.pre,
                     ', '.join(OrderedSet(tags + [filenameAccessor.pre]))))
            filenameAccessorLast = filenameAccessor
    writer.writerows(tag_set_names)
    csvfile.close()
Example #16
def rename_HDR(mode="HDRT", folder=r"HDR\w*"):
    """
    rename HDR pictures generated by FRANZIS HDR projects to a nicer form
    :param mode: name for HDR-Mode written to file
    :param folder: only files in folders matching this regex are renamed
    """
    log_function_call(rename_HDR.__name__, mode, folder)
    matchreg = r"^([-\w]+_[0-9]+)B\d(.*)_(?:\d+B)?\d\2"
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder): continue
        log().info("Folder: %s", dirpath)
        for filename in filenames:
            if mode in filename: continue
            match = re.search(matchreg, filename)
            if match:
                _rename_match(dirpath, filename, mode, match)
            else:
                log().info("no match: %s", filename)
        for dirname in dirnames:
            match = re.search(matchreg, dirname)
            if match:
                _rename_match(dirpath, dirname, mode, match)
Example #17
def filter_primary():
    """
    put single pictures and B1 pictures into the same directory 'primary'
    """
    log_function_call(filter_primary.__name__)
    inpath = os.getcwd()
    skipdirs = ["S", "SM", "TL", "mp4", "HDR", "single", "PANO", "others"]
    skipdirs += [model for model in c.CameraModelShort.values() if model]

    log().info(inpath)
    folders_to_main(dirs=["B" + str(i) for i in range(1, 8)])
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, skipdirs): continue
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames),
                   len(filenames))
        filenames = moveSeries(dirpath, filenames, "S")
        filenames = moveSeries(dirpath, filenames, "SM")
        filenames = moveSeries(dirpath, filenames, "TL")
        filenames = move_media(dirpath, filenames, settings.video_types, "mp4")
        filenames = move_media(dirpath, filenames, ["HDR"], "HDR")
        filenames = moveSeries(dirpath, filenames, "B", "1", "primary")
        filenames = moveSeries(dirpath, filenames, "B")
        move_media(dirpath, filenames, settings.image_types, "primary")
Example #18
def name_to_exif(folder=r"", additional_tags=(), startdir=None):
    """
    extract title, description and mode from name and write them to exif

    deprecated: try to use write_exif_using_csv() instead
    """
    inpath = os.getcwd()
    clock = Clock()
    file_types = settings.image_types + settings.video_types
    log().info("process %d Files in %s, subdir: %r", count_files_in(inpath, file_types, ""), inpath,
               settings.includeSubdirs)
    askToContinue()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder): continue
        filenames = filterFiles(filenames, file_types)
        for filename in filenames:
            meta_data = FileMetaData(dirpath, filename)
            if startdir:
                meta_data.import_fullname(startdir)
            else:
                meta_data.import_filename()
            meta_data.update({'tags': additional_tags})
            write_exiftag(meta_data.to_tag_dict(), dirpath, filename)
    clock.finish()
Example #19
def write_exif_using_csv(csv_filenames: Union[str, List[str]] = "*", folder: str = r"", start_folder: str = "",
                         csv_folder: str = None, csv_restriction: str = "", import_filename: bool = True,
                         import_exif: bool = True,
                         only_when_changed: bool = False, overwrite_gps: bool = False, is_video: bool = False):
    """
    csv files are used for setting tags
    the csv files have to be semicolon-separated
    empty values in a column, or columns that are not present, are not evaluated
    each '' in the following is a possible column name

    can also be used without any csv files at all, just to import the filename into the tags

    the following restrictions to files are possible:
        'directory': checks if the value is part of the directory
        'name_main': checks if the value is the first part of the filename
        'first': int counter min
        'last': int counter max
        'name_part': checks if the value is part of the filename

    :param csv_filenames:
        can be either "*" for all files in the directory or an iterable of filenames

        can set the following exif information: ['title', 'tags', 'tags2', 'tags3', 'rating', 'description', 'gps']
            tags are expected to be separated by ', '
            rating is expected to be in the interval [0,5]
            gps is expected to be 'lat, long' in decimal notation
        can set the Location via ['Country', 'State', 'City', 'Location']

        also sets a structured description for image processing like HDR and Panorama
        columns starting with
            'HDR' are evaluated as HDR description
            'TM' are evaluated as HDR Tone Mapping description
            'PANO' are evaluated as Panorama description
    :param csv_folder: location of the csv files - default is the .EXIFnaming/info
    :param csv_restriction: files that do not pass any of the restrictions in this file are not modified at all
    :param folder: process only folders matching this regex
    :param start_folder: directories before this name will be ignored, does not need to be a full directory name
    :param import_filename: whether to extract tags from the filename
    :param import_exif: whether to extract tags from the exif information
    :param overwrite_gps: modifier for import_exif, overwrites gps data with the information from the csv
    :param only_when_changed: if true, the filename is not imported to tags for files without matching entries in the csv;
        useless if csv_restriction is set
    :param is_video: whether video types should be written - video types might not handle tags correctly
    """
    if not csv_folder:
        csv_folder = get_setexif_dir()

    log_function_call(write_exif_using_csv.__name__, csv_filenames, folder, start_folder, csv_folder, csv_restriction,
                      import_filename, import_exif, only_when_changed, overwrite_gps)
    inpath = os.getcwd()
    clock = Clock()
    csv.register_dialect('semicolon', delimiter=';', lineterminator='\r\n')

    if csv_filenames == "*":
        csv_filenames = filterFiles(os.listdir(csv_folder), [".csv"])
    elif csv_filenames:
        csv_filenames = [csv_filename + ".csv" for csv_filename in csv_filenames]
    csv_filenames = [os.path.join(csv_folder, csv_filename) for csv_filename in csv_filenames]
    if csv_restriction:
        csv_restriction = os.path.join(csv_folder, csv_restriction) + ".csv"

    filetypes = settings.video_types if is_video else settings.image_types

    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder, start=start_folder): continue
        for filename in filterFiles(filenames, filetypes):
            meta_data = FileMetaData(dirpath, filename)
            if not _passes_restrictor(meta_data, csv_restriction): continue
            if import_filename: meta_data.import_filename()
            if import_exif: meta_data.import_exif(overwrite_gps)

            for csv_filename in csv_filenames:
                with open(csv_filename, "r", encoding="utf-8") as csvfile:
                    reader = csv.DictReader(csvfile, dialect='semicolon')
                    for row in reader:
                        meta_data.update(row)

            if not only_when_changed or meta_data.has_changed:
                write_exiftag(meta_data.to_tag_dict(), meta_data.directory, meta_data.filename)

    clock.finish()
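A sketch of a matching csv and a call; the file name, folder regex and all values are illustrative only (the csv has to be semicolon-separated and placed in the csv_folder):

# tags_holiday.csv (illustrative content):
#   directory;name_main;title;tags;rating
#   2018_Rome;2018-06-01_Rome;Colosseum;Rome, Italy;4

write_exif_using_csv(csv_filenames=["tags_holiday"], folder=r"2018\w*",
                     import_filename=True, only_when_changed=True)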
Example #20
def find_bad_exif(do_move=True,
                  check_date_additional=False,
                  folder: str = r""):
    """
    find files with missing exif data and write them to csv files
    :param do_move: whether to move the affected files into bad_exif_* subdirectories
    :param check_date_additional: also check for additionally set date tags
    :param folder: process only folders matching this regex
    """
    log_function_call(find_bad_exif.__name__, do_move)

    clock = Clock()
    inpath = os.getcwd()
    lines_no_tags = OrderedSet()
    lines_bad_date_additional = OrderedSet()
    lines_date_missing = OrderedSet()
    out_filename_no_tags = get_info_dir("no_tags.csv")
    file_no_tags, writer_no_tags = fileop.create_csv_writer(
        out_filename_no_tags, ["directory", "name_part"])
    out_filename_bad_date_additional = get_info_dir("bad_date_additional.csv")
    file_bad_date_additional, writer_bad_date_additional = fileop.create_csv_writer(
        out_filename_bad_date_additional, ["directory", "name_part"])
    out_filename_date_missing = get_info_dir("date_missing.csv")
    file_date_missing, writer_date_missing = fileop.create_csv_writer(
        out_filename_date_missing, ["directory", "name_part"])
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder): continue
        if fileop.count_files(filenames, settings.image_types) == 0: continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=False)
        if len(list(Tagdict.values())) == 0: continue
        leng = len(list(Tagdict.values())[0])
        for i in range(leng):
            if ("Keywords" not in Tagdict or not Tagdict["Keywords"][i]) or \
                    ("Subject" not in Tagdict or not Tagdict["Subject"][i]) or \
                    ("Description" not in Tagdict or not Tagdict["Description"][i]) or \
                    ("User Comment" not in Tagdict or not Tagdict["User Comment"][i]):
                lines_no_tags.add((os.path.basename(dirpath),
                                   _remove_counter(Tagdict["File Name"][i])))
                if do_move and "bad_exif" not in dirpath:
                    move(
                        Tagdict["File Name"][i], dirpath,
                        dirpath.replace(
                            inpath, os.path.join(inpath, "bad_exif_keywords")))
            if "Date/Time Original" not in Tagdict or not Tagdict["Date/Time Original"][i]:
                lines_date_missing.add(
                    (os.path.basename(dirpath),
                     _remove_counter(Tagdict["File Name"][i])))
                if do_move and "bad_exif" not in dirpath:
                    move(
                        Tagdict["File Name"][i], dirpath,
                        dirpath.replace(
                            inpath,
                            os.path.join(inpath, "bad_exif_date_missing")))
            if check_date_additional and \
                    (("Date Created" in Tagdict and Tagdict["Date Created"][i]) or
                     ("Time Created" in Tagdict and Tagdict["Time Created"][i]) or
                     ("Create Date" in Tagdict and Tagdict["Create Date"][i]) or
                     ("Modify Date" in Tagdict and Tagdict["Modify Date"][i]) or
                     ("Digital Creation Date" in Tagdict and Tagdict["Digital Creation Date"][i])):
                lines_bad_date_additional.add(
                    (os.path.basename(dirpath),
                     _remove_counter(Tagdict["File Name"][i])))
                if do_move and "bad_exif" not in dirpath:
                    move(
                        Tagdict["File Name"][i], dirpath,
                        dirpath.replace(
                            inpath,
                            os.path.join(inpath, "bad_exif_date_additional")))
    writer_no_tags.writerows(lines_no_tags)
    writer_bad_date_additional.writerows(lines_bad_date_additional)
    writer_date_missing.writerows(lines_date_missing)
    file_no_tags.close()
    file_bad_date_additional.close()
    file_date_missing.close()
    clock.finish()
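A usage sketch: report only, without moving any files:

# write the csv reports but leave all files where they are
find_bad_exif(do_move=False, check_date_additional=True)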