def order():
    """
    order files by date using exif info

    Images are moved into day folders named "YYMMDD_NN" (NN = two-digit counter
    within the day); a new folder is started whenever TimeJumpDetector reports
    a jump. Afterwards videos are moved into the "mp4" sub folder of the day
    folder whose time range lies closest to their date.
    """
    log_function_call(order.__name__)
    inpath = os.getcwd()
    Tagdict = read_exiftags(file_types=settings.image_types, ignore_model=True)
    timeJumpDetector = TimeJumpDetector()
    time_old = giveDatetime()
    dircounter = 1
    filenames = []
    # all tag columns have the same length -> number of files
    leng = len(list(Tagdict.values())[0])
    # first/last timestamp of each created directory, keyed by time
    dirNameDict_firsttime = OrderedDict()
    dirNameDict_lasttime = OrderedDict()
    time = giveDatetime(create_model(Tagdict, 0).get_date())
    daystring = dateformating(time, "YYMMDD_")
    dirName = daystring + "%02d" % dircounter
    dirNameDict_firsttime[time] = dirName
    log().info('Number of JPG: %d', leng)
    for i in range(leng):
        model = create_model(Tagdict, i)
        time = giveDatetime(model.get_date())
        if timeJumpDetector.isJump(time, len(filenames)):
            # time jump: flush collected files into the current directory
            dirNameDict_lasttime[time_old] = dirName
            moveFiles(filenames, os.path.join(inpath, dirName))
            filenames = []
            if newdate(time, time_old):
                # new day: fresh date prefix, restart counter
                daystring = dateformating(time, "YYMMDD_")
                dircounter = 1
            else:
                dircounter += 1
            dirName = daystring + "%02d" % dircounter
            dirNameDict_firsttime[time] = dirName
        filenames.append((model.dir, model.filename))
        time_old = time
    # move the remaining files of the last group
    dirNameDict_lasttime[time_old] = dirName
    moveFiles(filenames, os.path.join(inpath, dirName))
    print_firstlast_of_dirname(dirNameDict_firsttime, dirNameDict_lasttime)
    # second phase: distribute videos into the day folders created above
    Tagdict_mp4 = read_exiftags(file_types=settings.video_types)
    if len(Tagdict_mp4) == 0:
        return
    leng = len(list(Tagdict_mp4.values())[0])
    log().info('Number of mp4: %d', leng)
    for i in range(leng):
        model = create_model(Tagdict_mp4, i)
        time = giveDatetime(model.get_date())
        dirName = find_dir_with_closest_time(dirNameDict_firsttime, dirNameDict_lasttime, time)
        if dirName:
            move(model.filename, model.dir, os.path.join(inpath, dirName, "mp4"))
        else:
            log().warning("Did not move %s to %s", model.filename, dirName)
def rename_back(timestring="", fileext=".JPG"):
    """
    rename back using backup in saves; change to directory you want to rename back

    :param timestring: time of backup; if empty or no backup with that time
        exists, the most recent backup is used
    :param fileext: file extension
    :return:
    """
    log_function_call(rename_back.__name__)
    dirname = get_saves_dir()
    tagFile = os.path.join(dirname, "Tags" + fileext + "_" + timestring + ".npz")
    # Fall back to the newest backup when no timestring was given or the
    # requested backup file does not exist.
    # Bug fix: the condition used to be `os.path.isfile(tagFile)` (inverted),
    # which ignored an existing, explicitly requested backup and blindly
    # loaded a non-existent path otherwise.
    if not timestring or not os.path.isfile(tagFile):
        tagFiles = [x for x in os.listdir(dirname) if ".npz" in x]
        tagFile = os.path.join(dirname, tagFiles[-1])
    Tagdict = np.load(tagFile, allow_pickle=True)["Tagdict"].item()
    temppostfix = renameTemp(Tagdict["Directory"], Tagdict["File Name new"])
    log().debug("length of Tagdict: %d", len(list(Tagdict.values())[0]))
    for i in range(len(list(Tagdict.values())[0])):
        filename = Tagdict["File Name new"][i] + temppostfix
        # skip files that are not present under their new name
        if not os.path.isfile(os.path.join(Tagdict["Directory"][i], filename)):
            continue
        filename_old = Tagdict["File Name"][i]
        renameInPlace(Tagdict["Directory"][i], filename, filename_old)
        # swap old/new names so renaming back can itself be undone
        Tagdict["File Name new"][i], Tagdict["File Name"][i] = \
            Tagdict["File Name"][i], Tagdict["File Name new"][i]
    timestring = dateformating(dt.datetime.now(), "_MMDDHHmmss")
    np.savez_compressed(os.path.join(dirname, "Tags" + fileext + timestring), Tagdict=Tagdict)
def rename_in_csvs(timestring="", fileext=".JPG"):
    """
    use backup in saves; rename file references in csv in setexif directory

    :param timestring: time of backup; if empty or no backup with that time
        exists, the most recent backup is used
    :param fileext: file extension
    """
    log_function_call(rename_in_csvs.__name__)
    dirname = get_saves_dir()
    tagFile = os.path.join(dirname, "Tags" + fileext + "_" + timestring + ".npz")
    # Fall back to the newest backup when no timestring was given or the
    # requested backup file does not exist.
    # Bug fix: the condition used to be `os.path.isfile(tagFile)` (inverted),
    # ignoring an existing requested backup and loading a missing one otherwise.
    if not timestring or not os.path.isfile(tagFile):
        tagFiles = [x for x in os.listdir(dirname) if ".npz" in x]
        tagFile = os.path.join(dirname, tagFiles[-1])
    Tagdict = np.load(tagFile, allow_pickle=True)["Tagdict"].item()
    log().debug("length of Tagdict: %d", len(list(Tagdict.values())[0]))
    csv_filenames = filterFiles(os.listdir(get_setexif_dir()), [".csv"])
    csv_filenames = [
        os.path.join(get_setexif_dir(), csv_filename)
        for csv_filename in csv_filenames
    ]
    for csv_filename in csv_filenames:
        with open(csv_filename, "r", encoding="utf-8") as file:
            data = file.read()
        # replace every old filename with its new name in the csv content
        for i in range(len(list(Tagdict.values())[0])):
            data = data.replace(Tagdict["File Name"][i], Tagdict["File Name new"][i])
        with open(csv_filename, "w", encoding="utf-8") as file:
            file.write(data)
def shift_time(hours: int = 0, minutes: int = 0, seconds: int = 0, is_video: bool = False):
    """
    shift DateTimeOriginal to correct for wrong camera time setting

    :example: to adjust time zone by one, set hours=-1

    :param hours: hours to shift
    :param minutes: minutes to shift
    :param seconds: seconds to shift
    :param is_video: whether to modify videos, if false modifies pictures
    """
    log_function_call(shift_time.__name__, hours, minutes, seconds, is_video)
    inpath = os.getcwd()
    delta_t = dt.timedelta(hours=hours, minutes=minutes, seconds=seconds)
    file_types = settings.video_types if is_video else settings.image_types
    Tagdict = read_exiftags(inpath, file_types)
    if has_not_keys(Tagdict, keys=["Directory", "File Name", "Date/Time Original"]):
        return
    number_of_files = len(list(Tagdict.values())[0])
    # tags that receive the shifted timestamp; videos carry extra track/media tags
    tags_to_set = ["DateTimeOriginal", "CreateDate", "ModifyDate"]
    if is_video:
        tags_to_set += ["TrackCreateDate", "TrackModifyDate",
                        "MediaCreateDate", "MediaModifyDate"]
    dir_change_printer = DirChangePrinter(Tagdict["Directory"][0])
    for index in range(number_of_files):
        model = create_model(Tagdict, index)
        shifted = giveDatetime(model.get_date()) + delta_t
        shifted_string = dateformating(shifted, "YYYY:MM:DD HH:mm:ss")
        write_exiftag({tag: shifted_string for tag in tags_to_set},
                      model.dir, model.filename)
        dir_change_printer.update(model.dir)
    dir_change_printer.finish()
def create_counters_csv_per_dir():
    """
    extract counter from the file name
    write a csv file with those counters for each directory

    This csv can be modified to be used with :func:`write_exif_using_csv`
    If you want to modify it with EXCEL or Calc
    take care to import all columns of the csv as text.
    """
    # Bug fix: this used to log create_tags_csv_per_dir.__name__ (copy-paste),
    # mislabelling the log entry; log this function's own name instead.
    log_function_call(create_counters_csv_per_dir.__name__)
    inpath = os.getcwd()
    tag_set_names = OrderedSet()
    out_filename = get_info_dir("tags_counters.csv")
    csvfile, writer = fileop.create_csv_writer(out_filename, [
        "directory", "name_main", "name_part", "first", "last", "tags3", "description"
    ])
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        # only the direct sub directories of inpath are evaluated
        if not inpath == dirpath:
            continue
        for dirname in dirnames:
            filenameAccessors = [
                FilenameAccessor(filename)
                for filename in get_plain_filenames_of_type(image_types, dirpath, dirname)
            ]
            if len(filenameAccessors) == 0:
                continue
            _add_counter_csv_entries(dirname, filenameAccessors, tag_set_names)
    writer.writerows(tag_set_names)
    csvfile.close()
def filter_series():
    """ put each kind of series in its own directory """
    log_function_call(filter_series.__name__)
    inpath = os.getcwd()
    skipdirs = ["B" + str(i) for i in range(1, 8)]
    skipdirs += ["S", "SM", "TL", "mp4", "HDR", "single", "PANO", "others", "TLM"]
    # TLM: Timelapse manual - pictures on different days to be combined to a Timelapse
    skipdirs += [model for model in c.CameraModelShort.values() if model]
    log().info(inpath)
    # process-type folders, filtered in this exact order after series/videos
    process_targets = ("PANO", "ANIMA", "RET", "ZOOM", "SMALL", "CUT", "HDR")
    for dirpath, dirnames, filenames in os.walk(inpath):
        if is_invalid_path(dirpath, skipdirs):
            continue
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames), len(filenames))
        remaining = moveBracketSeries(dirpath, filenames)
        for series_type in ("S", "SM", "TL"):
            remaining = moveSeries(dirpath, remaining, series_type)
        remaining = move_media(dirpath, remaining, settings.video_types, "mp4")
        # filter process types to separate folders - attention: ordering of statements matters
        for process in process_targets:
            remaining = move_media(dirpath, remaining, [process], process)
        move_media(dirpath, remaining, settings.image_types, "single")
def geotag(timezone: int = 2, offset: str = "", start_folder: str = ""):
    """
    adds gps information to all pictures in all sub directories of current directory
    the gps information is obtained from gpx files, that are expected to be in a folder called ".gps"

    :param timezone: number of hours offset
    :param offset: offset in minutes and seconds, has to be in format +/-mm:ss e.g. -03:02
    :param start_folder: directories before this name will be ignored, does not needs to be a full directory name
    """
    # Fix: timezone was missing from the logged parameters (inconsistent with
    # the other functions that log all of their arguments).
    log_function_call(geotag.__name__, timezone, offset, start_folder)
    inpath = os.getcwd()
    gpxDir = get_gps_dir()
    options = ["-r", "-geotime<${DateTimeOriginal}%+03d:00" % timezone]
    if offset:
        options.append("-geosync=" + offset)
    # collect all gpx files from the top level of the gps directory
    for (dirpath, dirnames, filenames) in os.walk(gpxDir):
        if not gpxDir == dirpath:
            break
        for filename in filenames:
            if not filename.endswith(".gpx"):
                continue
            options.append("-geotag")
            options.append(os.path.join(gpxDir, filename))
    found_any = False
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if not inpath == dirpath:
            break
        for dirname in dirnames:
            if dirname.startswith("."):
                continue  # skip hidden directories such as .gps
            if dirname < start_folder:
                continue  # skip directories sorting before start_folder
            log().info(dirname)
            found_any = True
            call_exiftool(inpath, dirname, options=options)
    # no matching sub directory: geotag the current directory itself
    if not found_any:
        call_exiftool(inpath, options=options)
def folders_to_main(series: bool = False, primary: bool = False, blurry: bool = False,
                    dirs: list = None, one_level: bool = True, not_inpath: bool = True):
    """
    reverses filtering/sorting into directories

    :param series: restrict to reverse of filterSeries
    :param primary: restrict to reverse of filterPrimary
    :param blurry: restrict to reverse of detectBlurry
    :param dirs: restrict to reverse other dirs
    :param one_level: reverse only one directory up
    :param not_inpath: leave all directories in inpath as they are, only change subdirectories
    """
    log_function_call(folders_to_main.__name__, series, primary, blurry, dirs, one_level, not_inpath)
    inpath = os.getcwd()
    # names of the sub directories that shall be flattened; empty -> all
    reverseDirs = []
    if series:
        reverseDirs += ["B" + str(i) for i in range(1, 8)] + ["S", "single"]
    if primary:
        reverseDirs += ["B", "S", "TL", "SM", "primary"]
    if blurry:
        reverseDirs += ["blurry"]
    if dirs:
        reverseDirs += list(dirs)
    # first pass: measure the maximum directory depth to warn before flattening
    deepest = 0
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if not_inpath and dirpath == inpath:
            continue
        if is_invalid_path(dirpath, whitelist=reverseDirs):
            continue
        depth = get_relpath_depth(dirpath, inpath)
        deepest = max(deepest, depth)
    if not_inpath:
        deepest -= 1
    if not reverseDirs and deepest > 1:
        # no restriction given: flattening everything deserves a confirmation
        log().warning("A folder structure with a depth of %2d will be flattened", deepest)
        askToContinue()
    elif deepest > 3:
        log().warning("The folder structure has a depth of %2d", deepest)
        log().info("chosen directory names: %r", reverseDirs)
        askToContinue()
    # second pass: move media files up and remove directories that became empty
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if not_inpath and dirpath == inpath:
            continue
        if is_invalid_path(dirpath, whitelist=reverseDirs):
            continue
        if one_level:
            destination = os.path.dirname(dirpath)
        else:
            destination = inpath
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames), len(filenames))
        for filename in filenames:
            # only media files are moved; anything else stays behind
            if not file_has_ext(filename, settings.image_types + settings.video_types):
                continue
            move(filename, dirpath, destination)
        removeIfEmtpy(dirpath)
def rotate(subname: str = "", folder: str = r"", sign=1, override=True, ask=True):
    """
    rotate back according to tag information (Rotate 90 CW or Rotate 270 CW)

    Some programs like franzis hdr projects rotate the resolution of the picture -> picture gets upward resolution and
    shown as rotated two times. This function reverses the resolution rotation according to exif info.
    Pictures that either have no rotation according to exif or have a normal resolution ratio are not modified.
    So calling it a second time wont change anything.

    :param subname: only files that contain this name are rotated, empty string: no restriction
    :param sign: direction of rotation
    :param folder: only files in directories that match this regex are rotated, empty string: no restriction
    :param override: override file with rotated one
    :param ask: if should ask for user confirmation
    """
    log_function_call(rotate.__name__, subname, folder, sign, override, ask)
    from PIL import Image
    NFiles = 0
    clock = Clock()
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder):
            continue
        if len(filenames) == 0:
            continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=ask)
        if has_not_keys(Tagdict, keys=["Orientation"]):
            continue
        leng = len(list(Tagdict.values())[0])
        for i in range(leng):
            model = create_model(Tagdict, i)
            if subname not in model.filename:
                continue
            # skip pictures without exif rotation or with normal resolution ratio
            if model.is_rotated_by(0) or not model.is_upward():
                continue
            name = model.get_path()
            log().info("rotate %s", model.filename)
            # Fix: the image handle was never closed (resource leak; an open
            # handle can also block overwriting the file on Windows).
            img = Image.open(name)
            try:
                if model.is_rotated_by(90):
                    img_rot = img.rotate(90 * sign, expand=True)
                elif model.is_rotated_by(-90):
                    img_rot = img.rotate(-90 * sign, expand=True)
                else:
                    continue
                exif_bytes = img.info['exif']
            finally:
                img.close()
            NFiles += 1
            if not override:
                name = name[:name.rfind(".")] + "_ROTATED" + name[name.rfind("."):]
            img_rot.save(name, 'JPEG', quality=99, exif=exif_bytes)
    clock.finish()
def move_each_pretag_to_folder():
    """
    sort files into sub folders named after parts of their filename

    Each file is moved into a sub directory named after its filename prefix
    (``FilenameAccessor.pre``) unless that prefix already occurs in its
    directory path. If the file carries primary tags and the first one is not
    in the path, it is additionally moved into the nested folder chain built
    from all of its primary tags.
    """
    log_function_call(move_each_pretag_to_folder.__name__)
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath):
            continue
        for filename in filenames:
            filenameAccessor = FilenameAccessor(filename)
            if not filenameAccessor.pre in dirpath:
                move(filename, dirpath, os.path.join(dirpath, filenameAccessor.pre))
            if len(filenameAccessor.primtags) > 0 and not filenameAccessor.primtags[0] in dirpath:
                move(filename, dirpath, os.path.join(dirpath, *filenameAccessor.primtags))
def fake_date(start='2000:01:01', write=True, folder_dict: OrderedDict = None):
    """
    each file in a directory is one second later
    each dir is one day later

    :param start: the date on which to start generate fake dates
    :param write: whether should write or only print
    :param folder_dict: foldername to date
    """
    log_function_call(fake_date.__name__, start)
    inpath = os.getcwd()
    start_time_part = ' 06:00:00.000'
    start_time = giveDatetime(start + start_time_part)
    dir_counter = -1
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath):
            continue
        filenames = filterFiles(filenames, settings.image_types + settings.video_types)
        if not filenames or len(filenames) == 0:
            continue
        if folder_dict:
            # explicit mapping given: only process listed folders, at their date
            dirpath_rel = os.path.relpath(dirpath, inpath)
            if not dirpath_rel in folder_dict:
                continue
            else:
                dirtime = giveDatetime(folder_dict[dirpath_rel] + start_time_part)
        else:
            # otherwise each processed directory is one day later than the previous
            dir_counter += 1
            dirtime = start_time + dt.timedelta(days=dir_counter)
        log().info(dirtime)
        secounds = 0
        minutes = 0
        lastname = ""
        for filename in filenames:
            # files whose names differ only in the last two counter characters
            # are presumably one series -> one second apart; a new name group
            # starts a new minute (TODO confirm against remove_ext semantics)
            if len(filename) == len(lastname) and remove_ext(filename)[:-2] == remove_ext(lastname)[:-2]:
                secounds += 1
            else:
                secounds = 0
                minutes += 1
            lastname = filename
            time = dirtime + dt.timedelta(minutes=minutes, seconds=secounds)
            time_string = dateformating(time, "YYYY:MM:DD HH:mm:ss")
            if write:
                # CreateDate is sometimes set and google fotos gives it precedence over DateTimeOriginal
                write_exiftag({"DateTimeOriginal": time_string}, dirpath, filename,
                              ["-DateCreated=", "-TimeCreated=", "-CreateDate=", "-Artist=",
                               "-DigitalCreationDate=", "-ModifyDate=", "-DateTimeDigitized="])
def create_rating_csv(rating: int = 4, subdir: str = ""):
    """
    creates a csv file with all files in the directory
    the rating column is filled with param rating

    :param rating: rating to be written
    :param subdir: sub directory to make rating file of, if empty all directories will be taken
    """
    log_function_call(create_rating_csv.__name__, rating, subdir)
    inpath = os.getcwd()
    # name the output after the sub directory when one is given
    out_filebasename = "rating_" + subdir if subdir else "rating"
    out_filename = get_setexif_dir(out_filebasename + ".csv")
    rating_file, writer = fileop.create_csv_writer(out_filename, ["name_part", "rating"])
    for dirpath, _, filenames in os.walk(os.path.join(inpath, subdir)):
        if is_invalid_path(dirpath):
            continue
        for image_name in filterFiles(filenames, settings.image_types):
            writer.writerow([image_name, rating])
    rating_file.close()
def resize(size=(128, 128)):
    """
    resize to icon like image

    :param size: size of resulting image
    """
    log_function_call(resize.__name__, size)
    inpath = os.getcwd()
    dest = os.path.join(inpath, "SMALL")
    # Fix: os.mkdir crashed when SMALL already existed from a previous run
    os.makedirs(dest, exist_ok=True)
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, blacklist=["SMALL"]):
            continue
        for filename in filenames:
            if not file_has_ext(filename, ('.JPG', ".jpg")):
                continue
            # Load the original image; the with-block closes the file handle
            # (it was leaked before)
            accessor = FilenameAccessor(filename)
            with Image.open(os.path.join(dirpath, filename)) as img:
                # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the
                # same resampling filter under its current name
                img.thumbnail(size, Image.LANCZOS)
                accessor.processes.append("SMALL")
                outfile = os.path.join(dest, accessor.sorted_filename())
                img.save(outfile, 'JPEG', quality=90)
def first_date_per_folder() -> OrderedDict:
    """
    map each sub folder to the first "Date/Time Original" found in it

    (The docstring previously claimed "find files with missing exif data",
    copied from find_bad_exif.)

    :return: OrderedDict from folder path (relative to cwd) to its first
        non-empty date string
    """
    log_function_call(first_date_per_folder.__name__)
    clock = Clock()
    inpath = os.getcwd()
    folder_dict = OrderedDict()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath):
            continue
        if fileop.count_files(filenames, settings.image_types) == 0:
            continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=False)
        if len(list(Tagdict.values())) == 0:
            continue
        # Fix: indexing [0] on the filtered list raised IndexError when no
        # file in the folder carried a date; skip such folders instead.
        dates = [date for date in Tagdict.get("Date/Time Original", []) if date]
        if not dates:
            continue
        folder_dict[os.path.relpath(dirpath, inpath)] = dates[0]
    clock.finish()
    return folder_dict
def create_names_csv_per_dir(start_after_dir=''):
    """
    extract names from the file path
    write a csv file with those names for each directory

    This csv can be modified to be used with :func:`write_exif_using_csv`
    If you want to modify it with EXCEL or Calc
    take care to import all columns of the csv as text.

    :param start_after_dir: only the path parts after this directory name are
        collected as tags
    """
    log_function_call(create_names_csv_per_dir.__name__)
    inpath = os.getcwd()
    tag_set_names = OrderedSet()
    out_filename = get_info_dir("tags_names.csv")
    csvfile, writer = fileop.create_csv_writer(
        out_filename, ["directory", "name_main", "tags"])
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath):
            continue
        filenameAccessors = [
            FilenameAccessor(filename)
            for filename in filterFiles(filenames, image_types)
        ]
        if len(filenameAccessors) == 0:
            continue
        # collect the path parts below start_after_dir as tags
        tags = []
        found = False
        for part in dirpath.split(os.sep):
            if found:
                tags += part.split(', ')
            else:
                found = part == start_after_dir
        # one entry per run of identical main names (pre) in this directory
        filenameAccessorLast = filenameAccessors[0]
        tag_set_names.add(
            (", ".join(tags), filenameAccessorLast.pre,
             ', '.join(OrderedSet(tags + [filenameAccessorLast.pre]))))
        for filenameAccessor in filenameAccessors[1:]:
            if not filenameAccessor.pre == filenameAccessorLast.pre:
                tag_set_names.add(
                    (", ".join(tags), filenameAccessor.pre,
                     ', '.join(OrderedSet(tags + [filenameAccessor.pre]))))
                filenameAccessorLast = filenameAccessor
    writer.writerows(tag_set_names)
    csvfile.close()
def create_tags_csv_per_dir():
    """
    extract tags from the file name
    write a csv file with those tags and group them by toplevel directory

    This csv can be modified to be used with :func:`write_exif_using_csv` or :func:`placeinfo.write_infos`
    If you want to modify it with EXCEL or Calc
    take care to import all columns of the csv as text.
    """
    log_function_call(create_tags_csv_per_dir.__name__)
    inpath = os.getcwd()
    tag_set_names = OrderedSet()
    out_filename = get_info_dir("tags_per_dir.csv")
    tags_places_file, writer = fileop.create_csv_writer(
        out_filename, ["directory", "name_part"])
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        # only the direct sub directories of inpath are evaluated
        if not inpath == dirpath:
            continue
        for dirname in dirnames:
            tag_set = OrderedSet()
            filenameAccessors = [
                FilenameAccessor(filename)
                for filename in get_plain_filenames_of_type(image_types, dirpath, dirname)
            ]
            if len(filenameAccessors) == 0:
                continue
            # collect all distinct tags occurring in this directory
            for fileNameAccessor in filenameAccessors:
                for tag in fileNameAccessor.tags():
                    tag_set.add(tag)
            writeToFile(get_info_dir("tags.txt"),
                        dirname + "\n\t" + "\n\t".join(tag_set) + "\n")
            # drop purely numeric name parts (counters/dates) from the dir name
            dirname_split = dirname.split("_")
            subnames = [
                subname for subname in dirname_split if not subname.isnumeric()
            ]
            dirname = "_".join(subnames)
            for tag in tag_set:
                tag_set_names.add((dirname, tag))
    writer.writerows(tag_set_names)
    tags_places_file.close()
def filter_primary():
    """ put single and B1 in same directory """
    log_function_call(filter_primary.__name__)
    inpath = os.getcwd()
    skipdirs = ["S", "SM", "TL", "mp4", "HDR", "single", "PANO", "others"]
    skipdirs += [model for model in c.CameraModelShort.values() if model]
    log().info(inpath)
    # flatten bracket-series folders first so their files are filtered again below
    folders_to_main(dirs=["B" + str(i) for i in range(1, 8)])
    for dirpath, dirnames, filenames in os.walk(inpath):
        if is_invalid_path(dirpath, skipdirs):
            continue
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames), len(filenames))
        remaining = filenames
        for series_type in ("S", "SM", "TL"):
            remaining = moveSeries(dirpath, remaining, series_type)
        remaining = move_media(dirpath, remaining, settings.video_types, "mp4")
        remaining = move_media(dirpath, remaining, ["HDR"], "HDR")
        # first picture of each bracket series goes to "primary", the rest to "B"
        remaining = moveSeries(dirpath, remaining, "B", "1", "primary")
        remaining = moveSeries(dirpath, remaining, "B")
        move_media(dirpath, remaining, settings.image_types, "primary")
def order_with_timetable(timefile: str = None):
    """
    use timetable to create folder structure

    :param timefile: timetable file
    :return:
    """
    if not timefile:
        timefile = get_info_dir("timetable.txt")
    log_function_call(order_with_timetable.__name__, timefile)
    # first/last timestamp per target directory, read from the timetable
    dirNameDict_firsttime, dirNameDict_lasttime = _read_timetable(timefile)
    Tagdict = read_exiftags()
    number_of_files = len(list(Tagdict.values())[0])
    log().info('Number of jpg: %d', number_of_files)
    for index in range(number_of_files):
        model = create_model(Tagdict, index)
        file_time = giveDatetime(model.get_date())
        target_dir = find_dir_with_closest_time(
            dirNameDict_firsttime, dirNameDict_lasttime, file_time)
        if not target_dir:
            continue
        move(model.filename, model.dir, os.path.join(os.getcwd(), target_dir))
def rename_HDR(mode="HDRT", folder=r"HDR\w*"):
    """
    rename HDR pictures generated by FRANZIS HDR projects to a nicer form

    :param mode: name for HDR-Mode written to file
    :param folder: only files in folders of this name are renamed
    """
    log_function_call(rename_HDR.__name__, mode, folder)
    # group 1: main name incl. counter, group 2: part repeated after the
    # bracket-series section (backreference \2)
    matchreg = r"^([-\w]+_[0-9]+)B\d(.*)_(?:\d+B)?\d\2"
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder):
            continue
        log().info("Folder: %s", dirpath)
        for filename in filenames:
            if mode in filename:
                continue  # already renamed
            match = re.search(matchreg, filename)
            if match:
                _rename_match(dirpath, filename, mode, match)
            else:
                log().info("no match: %s", filename)
        # sub directories named like HDR output are renamed as well
        for dirname in dirnames:
            match = re.search(matchreg, dirname)
            if match:
                _rename_match(dirpath, dirname, mode, match)
def copy_exif_via_mainname(origin: str, target: str, overwriteDateTime: bool = False,
                           file_types: Iterable = settings.image_types):
    """
    copy exif information from files in directory origin to target

    files are matched via main name -> processed files can get exif information of original files

    :param overwriteDateTime: whether to overwrite already exiting "Date/Time Original"
    :param origin: where exif infos should be read
    :param target: where exif infos should be written to
    :param file_types: of target files, default: all image types
    """
    log_function_call(copy_exif_via_mainname.__name__, origin, target)
    inpath = os.getcwd()
    exclusion_tags = ["--PreviewImage", "--ThumbnailImage", "--Rating"]
    command = "-TagsFromFile"

    # first pass: collect target files grouped by their main name
    targets_by_main = {}
    for dirpath, _, filenames in os.walk(os.path.join(inpath, target)):
        if is_invalid_path(dirpath):
            continue
        for filename in filterFiles(filenames, file_types):
            if not overwriteDateTime:
                # skip targets that already carry a date
                tagDict = read_exiftag(dirpath, filename)
                if hasDateTime(tagDict):
                    continue
            mainname = FilenameAccessor(filename).mainname()
            targets_by_main.setdefault(mainname, []).append(os.path.join(dirpath, filename))

    # second pass: copy tags from each matching origin file to all its targets
    for dirpath, _, filenames in os.walk(os.path.join(inpath, origin)):
        if is_invalid_path(dirpath):
            continue
        for filename in filterFiles(filenames, settings.image_types):
            mainname = FilenameAccessor(filename).mainname()
            if mainname not in targets_by_main:
                continue
            target_basenames = [os.path.basename(path) for path in targets_by_main[mainname]]
            if filename in target_basenames:
                continue  # never copy a file onto itself
            origin_file = os.path.join(dirpath, filename)
            for target_file in targets_by_main[mainname]:
                call_exiftool_direct(exclusion_tags + [command, origin_file, target_file])
            del targets_by_main[mainname]
def write_exif_using_csv(csv_filenames: Union[str, List[str]] = "*",
                         folder: str = r"",
                         start_folder: str = "",
                         csv_folder: str = None,
                         csv_restriction: str = "",
                         import_filename: bool = True,
                         import_exif: bool = True,
                         only_when_changed: bool = False,
                         overwrite_gps: bool = False,
                         is_video: bool = False):
    """
    csv files are used for setting tags
    the csv files have to be separated by semicolon
    empty values in a column or not present columns are not evaluated
    each '' in the following is a possible column name

    can also be used without csv files at all just to import filename to tags

    following restrictions to files are possible:
        'directory': checks if value is part of directory
        'name_main': checks if value is the first part of filename
        'first': int counter min
        'last': int counter max
        'name_part': checks if value is part of filename

    :param csv_filenames: can be either "*" for all files in directory or a iterable of filenames

        can set follow exif information: ['title', 'tags', 'tags2', 'tags3', 'rating', 'description', 'gps']
            tags are expected to be separated by ', '
            rating is expected to be in interval [0,5]
            gps is expected to be 'lat, long' in decimal notation
        can set Location via ['Country', 'State', 'City', 'Location']
        sets also structured description for image processing like HDR and Panorama
            columns starting with
            'HDR' are evaluated as HDR description
            'TM' are evaluated as HDR Tone Mapping description
            'PANO' are evaluated as Panorama description
    :param csv_folder: location of csv files - standard is the .EXIFnaming/info
    :param csv_restriction: files that do not pass any of the restriction in this file are not modified at all
    :param folder: process only folders matching this regex
    :param start_folder: directories before this name will be ignored, does not needs to be a full directory name
    :param import_filename: whether to extract tags from filename
    :param import_exif: whether to extract tags from exif
    :param overwrite_gps: modifier for import_exif, overwrites gps data with information of csv
    :param only_when_changed: when true filename is not imported to tags for files without matching entries in csv
        useless if csv_restriction is set
    :param is_video: whether video types should be written - video types might not handle tags right
    """
    if not csv_folder:
        csv_folder = get_setexif_dir()
    log_function_call(write_exif_using_csv.__name__, csv_filenames, folder, start_folder, csv_folder,
                      csv_restriction, import_filename, import_exif, only_when_changed, overwrite_gps)
    inpath = os.getcwd()
    clock = Clock()
    csv.register_dialect('semicolon', delimiter=';', lineterminator='\r\n')
    # resolve the csv file selection to full paths
    if csv_filenames == "*":
        csv_filenames = filterFiles(os.listdir(csv_folder), [".csv"])
    elif csv_filenames:
        csv_filenames = [csv_filename + ".csv" for csv_filename in csv_filenames]
    csv_filenames = [os.path.join(csv_folder, csv_filename) for csv_filename in csv_filenames]
    if csv_restriction:
        csv_restriction = os.path.join(csv_folder, csv_restriction) + ".csv"
    filetypes = settings.video_types if is_video else settings.image_types
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder, start=start_folder):
            continue
        for filename in filterFiles(filenames, filetypes):
            meta_data = FileMetaData(dirpath, filename)
            if not _passes_restrictor(meta_data, csv_restriction):
                continue
            if import_filename:
                meta_data.import_filename()
            if import_exif:
                meta_data.import_exif(overwrite_gps)
            # apply every matching csv row to this file's meta data
            for csv_filename in csv_filenames:
                with open(csv_filename, "r", encoding="utf-8") as csvfile:
                    reader = csv.DictReader(csvfile, dialect='semicolon')
                    for row in reader:
                        meta_data.update(row)
            if not only_when_changed or meta_data.has_changed:
                write_exiftag(meta_data.to_tag_dict(), meta_data.directory, meta_data.filename)
    clock.finish()
def rename(Prefix="", dateformat='YYMM-DD', startindex=1, onlyprint=False,
           keeptags=True, is_video=False, name=""):
    """
    Rename into Format: [Prefix][dateformat](_[name])_[Filenumber][SeriesType][SeriesSubNumber]_[PhotoMode]

    :param Prefix: prefix has to fulfil regex [-a-zA-Z]*
    :param dateformat: Y:Year,M:Month,D:Day,N:DayCounter; Number of occurrences determine number of digits
    :param startindex: minimal counter
    :param onlyprint: do not rename; only output file of proposed renaming into saves directory
    :param keeptags: any tags - name or postfixes will be preserved
    :param is_video: is video file extension
    :param name: optional name between date and filenumber, seldom used
    """
    log_function_call(rename.__name__, Prefix, dateformat, startindex, onlyprint, keeptags, is_video, name)
    Tagdict = read_exiftags(
        file_types=settings.video_types if is_video else settings.image_types)
    if not Tagdict:
        return
    # rename temporary
    if not onlyprint:
        temppostfix = renameTemp(Tagdict["Directory"], Tagdict["File Name"])
    else:
        temppostfix = ""
    # initialize
    outstring = ""
    Tagdict["File Name new"] = []
    time_old = giveDatetime()
    counter = startindex - 1
    # number of digits needed for the per-day counter
    digits = _count_files_for_each_date(Tagdict, startindex, dateformat)
    number_of_files = len(list(Tagdict.values())[0])
    daystring = ""
    for i in range(number_of_files):
        model = create_model(Tagdict, i)
        time = giveDatetime(model.get_date())
        filename = model.filename
        if newdate(time, time_old, 'D' in dateformat or 'N' in dateformat):
            # a new date: refresh the date part and restart the counter
            daystring = dateformating(time, dateformat)
            if not i == 0:
                counter = 0
        filenameBuilder = FilenameBuilder(filename)
        filenameBuilder.add_main(Prefix + daystring)
        filenameBuilder.add_main(model.get_model_abbr())
        filenameBuilder.add_main(name)
        if is_video:
            counter += 1
            counterString = "M%02d" % counter
            filenameBuilder.add_post(model.get_recMode())
        else:
            # SequenceNumber: only the first picture of a series (or a new
            # timestamp) advances the counter
            sequence_number = model.get_sequence_number()
            if sequence_number < 2 and not time == time_old or model.ignore_same_date():
                counter += 1
            counterString = ("%0" + digits + "d") % counter
            if not "HDR" in filename:
                counterString += model.get_sequence_string()
        filenameBuilder.add_main(counterString)
        filenameBuilder.add_post(model.get_mode())
        if keeptags:
            filenameBuilder.use_old_tags()
        newname = filenameBuilder.build()
        # handle already exiting filename - its ok when they are in different directories
        # its not ok to have to files with one uppercase and another lowercase -> equals with ignore case
        if len(Tagdict["File Name new"]) > 0:
            if newname.lower() == Tagdict["File Name new"][-1].lower() and \
                    Tagdict["Directory"][i].lower() == Tagdict["Directory"][i - 1].lower():
                log().warning("%s already exists - postfix it with V%d",
                              os.path.join(model.dir, newname), 1)
                newname = filenameBuilder.set_version("V%d" % 1).build()
            # escalate the version postfix until the name no longer collides
            # with an earlier name in the same directory
            for version in range(2, 100):
                indexes_samename = [
                    i for i, name in enumerate(Tagdict["File Name new"])
                    if name.lower() == newname.lower()
                ]
                if indexes_samename:
                    if Tagdict["Directory"][i] == Tagdict["Directory"][indexes_samename[0]]:
                        log().warning("%s already exists - postfix it with V%d",
                                      os.path.join(model.dir, newname), version)
                        newname = filenameBuilder.set_version("V%d" % version).build()
                    else:
                        break
                else:
                    break
        time_old = time
        Tagdict["File Name new"].append(newname)
        outstring += _write(model.dir, filename, temppostfix, newname, onlyprint)
    # save a backup of the renaming so it can be reverted by rename_back
    dirname = get_saves_dir()
    timestring = dateformating(dt.datetime.now(), "_MMDDHHmmss")
    video_str = "_video" if is_video else ""
    np.savez_compressed(os.path.join(dirname, "Tags" + video_str + timestring), Tagdict=Tagdict)
    writeToFile(
        os.path.join(dirname, "newnames" + video_str + timestring + ".txt"), outstring)
def find_bad_exif(do_move=True, check_date_additional=False, folder: str = r""):
    """
    find files with missing exif data

    Writes three csv reports (no_tags, date_missing, bad_date_additional) and
    optionally moves offending files into matching "bad_exif_*" folders.

    :param do_move: whether to move found files into "bad_exif_*" folders
    :param check_date_additional: also report files carrying additional date tags
    :param folder: only directories matching this regex are scanned
    """
    log_function_call(find_bad_exif.__name__, do_move)
    clock = Clock()
    inpath = os.getcwd()
    lines_no_tags = OrderedSet()
    lines_bad_date_additional = OrderedSet()
    lines_date_missing = OrderedSet()
    out_filename_no_tags = get_info_dir("no_tags.csv")
    file_no_tags, writer_no_tags = fileop.create_csv_writer(
        out_filename_no_tags, ["directory", "name_part"])
    out_filename_bad_date_additional = get_info_dir("bad_date_additional.csv")
    file_bad_date_additional, writer_bad_date_additional = fileop.create_csv_writer(
        out_filename_bad_date_additional, ["directory", "name_part"])
    out_filename_date_missing = get_info_dir("date_missing.csv")
    file_date_missing, writer_date_missing = fileop.create_csv_writer(
        out_filename_date_missing, ["directory", "name_part"])
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder):
            continue
        if fileop.count_files(filenames, settings.image_types) == 0:
            continue  # no image files in this directory
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=False)
        if len(list(Tagdict.values())) == 0:
            continue
        leng = len(list(Tagdict.values())[0])
        for i in range(leng):
            # any of the tag containers missing or empty -> record as "no tags"
            if (not "Keywords" in Tagdict or not Tagdict["Keywords"][i]) or \
                    (not "Subject" in Tagdict or not Tagdict["Subject"][i]) or \
                    (not "Description" in Tagdict or not Tagdict["Description"][i]) or \
                    (not "User Comment" in Tagdict or not Tagdict["User Comment"][i]):
                lines_no_tags.add(
                    (os.path.basename(dirpath), _remove_counter(Tagdict["File Name"][i])))
                if do_move and not "bad_exif" in dirpath:
                    move(Tagdict["File Name"][i], dirpath,
                         dirpath.replace(inpath, os.path.join(inpath, "bad_exif_keywords")))
            if not "Date/Time Original" in Tagdict or not Tagdict["Date/Time Original"][i]:
                lines_date_missing.add(
                    (os.path.basename(dirpath), _remove_counter(Tagdict["File Name"][i])))
                if do_move and not "bad_exif" in dirpath:
                    move(Tagdict["File Name"][i], dirpath,
                         dirpath.replace(inpath, os.path.join(inpath, "bad_exif_date_missing")))
            # extra date tags can override DateTimeOriginal in other software
            if check_date_additional and \
                    (("Date Created" in Tagdict and Tagdict["Date Created"][i]) or
                     ("Time Created" in Tagdict and Tagdict["Time Created"][i]) or
                     ("Create Date" in Tagdict and Tagdict["Create Date"][i]) or
                     ("Modify Date" in Tagdict and Tagdict["Modify Date"][i]) or
                     ("Digital Creation Date" in Tagdict and Tagdict["Digital Creation Date"][i])):
                lines_bad_date_additional.add(
                    (os.path.basename(dirpath), _remove_counter(Tagdict["File Name"][i])))
                if do_move and not "bad_exif" in dirpath:
                    move(Tagdict["File Name"][i], dirpath,
                         dirpath.replace(inpath, os.path.join(inpath, "bad_exif_date_additional")))
    writer_no_tags.writerows(lines_no_tags)
    writer_bad_date_additional.writerows(lines_bad_date_additional)
    writer_date_missing.writerows(lines_date_missing)
    file_no_tags.close()
    file_bad_date_additional.close()
    file_date_missing.close()
    clock.finish()