def rename_back(timestring="", fileext=".JPG"):
    """
    rename back using backup in saves; change to directory you want to rename back

    :param timestring: time of backup
    :param fileext: file extension
    :return:
    """
    log_function_call(rename_back.__name__)
    dirname = get_saves_dir()
    tagFile = os.path.join(dirname, "Tags" + fileext + "_" + timestring + ".npz")
    # fall back to the newest backup only when no timestring was given or the
    # requested backup file does not exist (original condition was inverted and
    # discarded an existing, explicitly requested backup)
    if not timestring or not os.path.isfile(tagFile):
        tagFiles = [x for x in os.listdir(dirname) if ".npz" in x]
        tagFile = os.path.join(dirname, tagFiles[-1])
    Tagdict = np.load(tagFile, allow_pickle=True)["Tagdict"].item()
    # rename to temporary names first to avoid collisions between old and new names
    temppostfix = renameTemp(Tagdict["Directory"], Tagdict["File Name new"])
    log().debug("length of Tagdict: %d", len(list(Tagdict.values())[0]))
    for i in range(len(list(Tagdict.values())[0])):
        filename = Tagdict["File Name new"][i] + temppostfix
        if not os.path.isfile(os.path.join(Tagdict["Directory"][i], filename)):
            continue
        filename_old = Tagdict["File Name"][i]
        renameInPlace(Tagdict["Directory"][i], filename, filename_old)
        # swap old/new entries so the freshly saved backup reflects the reversal
        Tagdict["File Name new"][i], Tagdict["File Name"][i] = \
            Tagdict["File Name"][i], Tagdict["File Name new"][i]
    timestring = dateformating(dt.datetime.now(), "_MMDDHHmmss")
    np.savez_compressed(os.path.join(dirname, "Tags" + fileext + timestring), Tagdict=Tagdict)
def filter_series():
    """put each kind of series into its own directory"""
    log_function_call(filter_series.__name__)
    cwd = os.getcwd()
    skip = ["B" + str(i) for i in range(1, 8)]
    # TLM: Timelapse manual - pictures on different days to be combined to a Timelapse
    skip += ["S", "SM", "TL", "mp4", "HDR", "single", "PANO", "others", "TLM"]
    skip += [model for model in c.CameraModelShort.values() if model]
    log().info(cwd)
    for dirpath, dirnames, filenames in os.walk(cwd):
        if is_invalid_path(dirpath, skip):
            continue
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames), len(filenames))
        remaining = moveBracketSeries(dirpath, filenames)
        for series_type in ("S", "SM", "TL"):
            remaining = moveSeries(dirpath, remaining, series_type)
        remaining = move_media(dirpath, remaining, settings.video_types, "mp4")
        # filter process types to separate folders - attention: ordering of statements matters
        for process in ("PANO", "ANIMA", "RET", "ZOOM", "SMALL", "CUT", "HDR"):
            remaining = move_media(dirpath, remaining, [process], process)
        move_media(dirpath, remaining, settings.image_types, "single")
def update(self, directory):
    """Count one processed tag; on a directory change, log the finished
    directory's count and start counting for the new one."""
    if self.current_dir != directory:
        log().info("updated %4d tags in %s", self.counter, self.current_dir)
        self.counter = 0
        self.current_dir = directory
    self.counter += 1
def rename_in_csvs(timestring="", fileext=".JPG"):
    """
    use backup in saves; rename file references in csv in setexif directory

    :param timestring: time of backup
    :param fileext: file extension
    """
    log_function_call(rename_in_csvs.__name__)
    dirname = get_saves_dir()
    tagFile = os.path.join(dirname, "Tags" + fileext + "_" + timestring + ".npz")
    # fall back to the newest backup only when no timestring was given or the
    # requested backup file does not exist (original condition was inverted and
    # ignored an existing, explicitly requested backup)
    if not timestring or not os.path.isfile(tagFile):
        tagFiles = [x for x in os.listdir(dirname) if ".npz" in x]
        tagFile = os.path.join(dirname, tagFiles[-1])
    Tagdict = np.load(tagFile, allow_pickle=True)["Tagdict"].item()
    log().debug("length of Tagdict: %d", len(list(Tagdict.values())[0]))
    csv_filenames = filterFiles(os.listdir(get_setexif_dir()), [".csv"])
    csv_filenames = [os.path.join(get_setexif_dir(), csv_filename)
                     for csv_filename in csv_filenames]
    for csv_filename in csv_filenames:
        with open(csv_filename, "r", encoding="utf-8") as file:
            data = file.read()
        # replace every old file name with its renamed counterpart
        for i in range(len(list(Tagdict.values())[0])):
            data = data.replace(Tagdict["File Name"][i], Tagdict["File Name new"][i])
        with open(csv_filename, "w", encoding="utf-8") as file:
            file.write(data)
def sort_dict(indict: Dict[str, list], keys: list) -> Dict[str, list]:
    """Sort all value lists of *indict* in lockstep by the columns named in *keys*.

    example:
        indict={"foo": [1, 3, 2], "bar": [8, 7, 6]}
        keys=["foo"]
        -> {"foo": [1, 2, 3], "bar": [8, 6, 7]}

    :param indict: dict of equally long lists (parallel columns)
    :param keys: column names to sort by, in priority order
    :return: new dict with the same keys and reordered value lists
    """
    indictkeys = list(indict.keys())
    cols = [indictkeys.index(key) for key in keys]
    # transpose columns into rows (the original's "bad key" warning branch was
    # unreachable: the iterated keys come from indict itself)
    rows = [[indict[key][i] for key in indictkeys]
            for i in range(len(list(indict.values())[0]))]
    # stable sort by the least significant key first, so earlier keys dominate
    for col in reversed(cols):
        rows.sort(key=operator.itemgetter(col))
    # transpose back into columns
    outdict = {key: [] for key in indict}
    for row in rows:
        for key, val in zip(indictkeys, row):
            outdict[key].append(val)
    return outdict
def sanitize_filename(folder=r"", posttags_to_end: List[str] = None, onlyprint=False):
    """
    sanitize order of Scene and Process tags
    sanitize counter to be split by $
    sanitize sub process names added by a external program to by concat to main processname (only Hugin)

    :param folder: optional regex for restrict to folders
    :param posttags_to_end: optional for resorting special posttags to end
    :param onlyprint: if true, renaming will only printed to log and no files are renamed, good for testing
    :return:
    """
    cwd = os.getcwd()
    for dirpath, dirnames, filenames in os.walk(cwd):
        if is_invalid_path(dirpath, regex=folder):
            continue
        # files and directories are sanitized with the same rules
        for entry in filenames + dirnames:
            name = entry.replace("panorama", "PANO")
            accessor = FilenameAccessor(name)
            _sanitize_posttags(accessor, posttags_to_end)
            _sanitize_process_counter(accessor)
            _sanitize_pano(accessor)
            sanitized = accessor.sorted_filename()
            if name != sanitized:
                log().info("rename: %s to %s", name, sanitized)
                if not onlyprint:
                    renameInPlace(dirpath, name, sanitized)
def geotag(timezone: int = 2, offset: str = "", start_folder: str = ""):
    """
    adds gps information to all pictures in all sub directories of current directory
    the gps information is obtained from gpx files, that are expected to be in a folder called ".gps"

    :param timezone: number of hours offset
    :param offset: offset in minutes and seconds, has to be in format +/-mm:ss e.g. -03:02
    :param start_folder: directories before this name will be ignored, does not needs to be a full directory name
    """
    log_function_call(geotag.__name__, offset, start_folder)
    inpath = os.getcwd()
    gpxDir = get_gps_dir()
    # shift DateTimeOriginal by the timezone before matching against the gpx track
    options = ["-r", "-geotime<${DateTimeOriginal}%+03d:00" % timezone]
    if offset:
        options.append("-geosync=" + offset)
    # collect all .gpx files; os.walk yields the top directory first, so the
    # break restricts the scan to the top level of gpxDir only
    for (dirpath, dirnames, filenames) in os.walk(gpxDir):
        if not gpxDir == dirpath:
            break
        for filename in filenames:
            if not filename.endswith(".gpx"):
                continue
            options.append("-geotag")
            options.append(os.path.join(gpxDir, filename))
    found_any = False
    # same break pattern: only direct subdirectories of the working directory
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if not inpath == dirpath:
            break
        for dirname in dirnames:
            if dirname.startswith("."):
                continue
            # skip folders sorting lexicographically before start_folder
            if dirname < start_folder:
                continue
            log().info(dirname)
            found_any = True
            call_exiftool(inpath, dirname, options=options)
    # no subdirectory matched: geotag the current directory itself
    if not found_any:
        call_exiftool(inpath, options=options)
def renameTemp(DirectoryList: list, FileNameList: list) -> str:
    """Append a temporary postfix to every listed file and return that postfix.

    Returns "" (and logs an error) when the two lists differ in length.
    """
    if len(DirectoryList) != len(FileNameList):
        log().error("error in renameTemp: len(DirectoryList)!=len(FileNameList)")
        return ""
    temppostfix = "temp"
    for directory, name in zip(DirectoryList, FileNameList):
        rename_join((directory, name), (directory, name + temppostfix))
    return temppostfix
def _decode_error(err) -> List[str]:
    """Decode exiftool stderr bytes and split them into non-empty lines.

    Tries the configured encoding first, then UTF-8; if both fail the raw
    bytes are logged and an empty result is returned.
    """
    decoded = ""
    for encoding in (settings.encoding_format, "UTF-8"):
        try:
            decoded = err.decode(encoding)
            break
        except UnicodeDecodeError:
            continue
    else:
        log().warning(err)
    return [line for line in decoded.split("\r\n") if line]
def copy_subdirectories(dest: str, dir_names: []):
    """
    copy sub folders of specified names to dest without directory structure

    :param dest: copy destination
    :param dir_names: directory names to copy
    """
    cwd = os.getcwd()
    log().info(cwd)
    for dirpath, dirnames, filenames in os.walk(cwd):
        if is_invalid_path(dirpath, whitelist=dir_names):
            continue
        # NOTE(review): bare filenames are passed (no dirpath join), unlike
        # copy_files — presumably copyFilesTo resolves them; confirm
        copyFilesTo(filenames, dest, False)
def has_not_keys(indict: dict, keys: list) -> bool:
    """Return True if *indict* is missing any of *keys* (or either input is empty).

    All missing keys are collected and logged before returning, instead of
    returning on the first miss, so the summary log shows the complete list
    (the original returned inside the loop, leaving the summary unreachable).

    :param indict: dictionary to check
    :param keys: keys that must all be present
    :return: True if any key is absent, False otherwise
    """
    if not indict or not keys:
        return True
    notContains = [key for key in keys if key not in indict]
    if notContains:
        for key in notContains:
            log().info("%s not in dict", key)
        log().info("dictionary of tags doesn't contain: %s", notContains)
        return True
    return False
def rotate(subname: str = "", folder: str = r"", sign=1, override=True, ask=True):
    """
    rotate back according to tag information (Rotate 90 CW or Rotate 270 CW)

    Some programs like franzis hdr projects rotate the resolution of the picture ->
    picture gets upward resolution and shown as rotated two times. This function
    reverses the resolution rotation according to exif info. Pictures that either
    have no rotation according to exif or have a normal resolution ratio are not
    modified. So calling it a second time wont change anything.

    :param subname: only files that contain this name are rotated, empty string: no restriction
    :param sign: direction of rotation
    :param folder: only files in directories that match this regex are rotated, empty string: no restriction
    :param override: override file with rotated one
    :param ask: if should ask for user confirmation
    """
    log_function_call(rotate.__name__, subname, folder, sign, override, ask)
    from PIL import Image
    NFiles = 0
    clock = Clock()
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder):
            continue
        if len(filenames) == 0:
            continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=ask)
        if has_not_keys(Tagdict, keys=["Orientation"]):
            continue
        leng = len(list(Tagdict.values())[0])
        for i in range(leng):
            # Load the original image:
            model = create_model(Tagdict, i)
            if not subname in model.filename:
                continue
            # skip pictures without exif rotation or already in normal resolution ratio
            if model.is_rotated_by(0) or not model.is_upward():
                continue
            name = model.get_path()
            log().info("rotate %s", model.filename)
            img = Image.open(name)
            if model.is_rotated_by(90):
                img_rot = img.rotate(90 * sign, expand=True)
            elif model.is_rotated_by(-90):
                img_rot = img.rotate(-90 * sign, expand=True)
            else:
                continue
            NFiles += 1
            if not override:
                # write a new *_ROTATED file instead of replacing the original
                name = name[:name.rfind(".")] + "_ROTATED" + name[name.rfind("."):]
            # keep the original exif block; quality 99 minimizes recompression loss
            img_rot.save(name, 'JPEG', quality=99, exif=img.info['exif'])
    clock.finish()
def copy_files(dest: str, sub_name: str = None):
    """
    copy files which have names containing sub_name to dest without directory structure

    :param dest: copy destination
    :param sub_name: name part to search
    """
    cwd = os.getcwd()
    log().info(cwd)
    matches = []
    for dirpath, dirnames, filenames in os.walk(cwd):
        matches.extend(os.path.join(dirpath, name)
                       for name in filenames
                       if not sub_name or sub_name in name)
    copyFilesTo(matches, dest, False)
def call_exiftool(dirpath: str, name: str = "", options: List[str] = None, override=True) -> (str, List[str]):
    """Run exiftool on a file or directory, auto-repairing known metadata defects.

    :param dirpath: directory of the target (or the target itself if name is empty)
    :param name: file or subdirectory name inside dirpath
    :param options: additional exiftool command line options
    :param override: whether exiftool may overwrite files in place
    :return: tuple of (stdout text, list of stderr lines)
    """
    if not options:
        options = []
    fullname = os.path.join(dirpath, name) if name else dirpath
    (out, errorLines) = call_exiftool_direct(options + [fullname], override)
    # https://exiftool.org/faq.html#Q20
    # known structural defects exiftool reports but can repair by rewriting
    if any("Bad format (0) for IFD0 entry" in line or "Invalid XMP" in line
           or "Exif_0x0000 tag out of sequence in IFD0" in line
           or "IFD1 pointer references previous InteropIFD directory" in line
           for line in errorLines):
        log().info("try to fix Bad format")
        # rewrite the metadata structure, then retry the original command
        call_exiftool_direct(call_exiftool_direct.auto_fix_options_bad_format + [fullname], override)
        log().info("try again")
        (out, errorLines) = call_exiftool_direct(options + [fullname], override)
    if any("Bad MakerNotes offset" in line for line in errorLines):
        log().info("try to fix Bad MakerNotes")
        call_exiftool_direct(call_exiftool_direct.auto_fix_options_bad_makernotes + [fullname], override)
        log().info("try again")
        (out, errorLines) = call_exiftool_direct(options + [fullname], override)
    return out, errorLines
def decode_exiftags(tags: str) -> Dict[str, str]:
    """Parse exiftool "key: value" output lines into an ordered mapping.

    The first occurrence of a key wins; known unknown-tag values are mapped
    via ModelBase.unknownTags, and Directory values get OS path separators.
    """
    tagDict = OrderedDict()
    for line in tags.split("\r\n"):
        parts = line.split(": ", 1)
        if len(parts) != 2:
            continue
        key, val = parts[0].strip(), parts[1].strip()
        if key in tagDict:
            continue
        if (key, val) in ModelBase.unknownTags:
            val = ModelBase.unknownTags[(key, val)]
        if key == "Directory":
            val = val.replace("/", os.sep)
        tagDict[key] = val
    if not tagDict:
        log().error("no tags extracted from: %r", tags)
    return tagDict
def folders_to_main(series: bool = False, primary: bool = False, blurry: bool = False, dirs: list = None, one_level: bool = True, not_inpath: bool = True):
    """
    reverses filtering/sorting into directories

    :param series: restrict to reverse of filterSeries
    :param primary: restrict to reverse of filterPrimary
    :param blurry: restrict to reverse of detectBlurry
    :param dirs: restrict to reverse other dirs
    :param one_level: reverse only one directory up
    :param not_inpath: leave all directories in inpath as they are, only change subdirectories
    """
    log_function_call(folders_to_main.__name__, series, primary, blurry, dirs, one_level, not_inpath)
    inpath = os.getcwd()
    reverseDirs = []
    if series:
        reverseDirs += ["B" + str(i) for i in range(1, 8)] + ["S", "single"]
    if primary:
        reverseDirs += ["B", "S", "TL", "SM", "primary"]
    if blurry:
        reverseDirs += ["blurry"]
    if dirs:
        reverseDirs += list(dirs)
    # first pass: measure how deep the affected folder structure is
    deepest = 0
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if not_inpath and dirpath == inpath:
            continue
        if is_invalid_path(dirpath, whitelist=reverseDirs):
            continue
        depth = get_relpath_depth(dirpath, inpath)
        deepest = max(deepest, depth)
    if not_inpath:
        deepest -= 1
    # warn before flattening deep structures - the operation is destructive
    if not reverseDirs and deepest > 1:
        log().warning("A folder structure with a depth of %2d will be flattened", deepest)
        askToContinue()
    elif deepest > 3:
        log().warning("The folder structure has a depth of %2d", deepest)
        log().info("chosen directory names: %r", reverseDirs)
        askToContinue()
    # second pass: move the media files up and drop emptied directories
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if not_inpath and dirpath == inpath:
            continue
        if is_invalid_path(dirpath, whitelist=reverseDirs):
            continue
        if one_level:
            destination = os.path.dirname(dirpath)
        else:
            destination = inpath
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames), len(filenames))
        for filename in filenames:
            # only media files are moved; other files stay behind
            if not file_has_ext(filename, settings.image_types + settings.video_types):
                continue
            move(filename, dirpath, destination)
        removeIfEmtpy(dirpath)
def process_to_description(process: str) -> dict:
    """Map an HDR process name to description tags (HDR-Algorithm, TM-Preset).

    Returns an empty dict for non-HDR process names; unknown parts are logged.
    """
    description = {}
    if "HDR" not in process:
        return description
    # drop trailing counter digits and anything after '$', then split into parts
    parts = process.strip('123456789').split('$')[0].split('-')
    if len(parts) > 1:
        if parts[1] in c.hdr_algorithm:
            description["HDR-Algorithm"] = c.hdr_algorithm[parts[1]]
        else:
            log().info("%s not in hdr_algorithm", parts[1])
    if len(parts) > 2:
        if parts[2] in c.tm_preset:
            description["TM-Preset"] = c.tm_preset[parts[2]]
        else:
            log().info("%s not in tm_preset", parts[2])
    return description
def fake_date(start='2000:01:01', write=True, folder_dict: OrderedDict = None):
    """
    each file in a directory is one second later
    each dir is one day later

    :param start: the date on which to start generate fake dates
    :param write: whether should write or only print
    :param folder_dict: foldername to date
    """
    log_function_call(fake_date.__name__, start)
    inpath = os.getcwd()
    start_time_part = ' 06:00:00.000'
    start_time = giveDatetime(start + start_time_part)
    dir_counter = -1
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath):
            continue
        filenames = filterFiles(filenames, settings.image_types + settings.video_types)
        if not filenames or len(filenames) == 0:
            continue
        if folder_dict:
            # explicit mapping given: only process folders listed in folder_dict
            dirpath_rel = os.path.relpath(dirpath, inpath)
            if not dirpath_rel in folder_dict:
                continue
            else:
                dirtime = giveDatetime(folder_dict[dirpath_rel] + start_time_part)
        else:
            # no mapping: each processed directory is one day after the previous
            dir_counter += 1
            dirtime = start_time + dt.timedelta(days=dir_counter)
        log().info(dirtime)
        secounds = 0
        minutes = 0
        lastname = ""
        for filename in filenames:
            # names differing only in the last two characters form a series and
            # get consecutive seconds; any other name starts a new minute
            if len(filename) == len(lastname) and remove_ext(filename)[:-2] == remove_ext(lastname)[:-2]:
                secounds += 1
            else:
                secounds = 0
                minutes += 1
            lastname = filename
            time = dirtime + dt.timedelta(minutes=minutes, seconds=secounds)
            time_string = dateformating(time, "YYYY:MM:DD HH:mm:ss")
            if write:
                # CreateDate is sometimes set and google fotos gives it precedence over DateTimeOriginal
                write_exiftag({"DateTimeOriginal": time_string}, dirpath, filename, ["-DateCreated=", "-TimeCreated=", "-CreateDate=", "-Artist=", "-DigitalCreationDate=", "-ModifyDate=", "-DateTimeDigitized="])
def print_timetable():
    """
    print the time of the first and last picture in a directory to a file

    Appends one line per direct subdirectory of the working directory to
    timetable.txt in the info dir.
    """
    inpath = os.getcwd()
    # context manager guarantees the file is closed even if reading tags fails
    # (original opened the file without `with` and leaked it on exceptions)
    with open(get_info_dir("timetable.txt"), 'a') as ofile:
        for (dirpath, dirnames, filenames) in os.walk(inpath):
            # only the top level: os.walk yields inpath first
            if not inpath == dirpath:
                continue
            for dirname in dirnames:
                if dirname.startswith('.'):
                    continue
                log().info("Folder: %s", dirname)
                fotos = get_filename_sorted_dirfiletuples(settings.image_types, inpath, dirname)
                if not fotos:
                    continue
                first = _get_time(fotos[0])
                last = _get_time(fotos[-1])
                ofile.write("%-55s; %12s; %12s\n" % (dirname, first, last))
def write_exiftags(tagDict: dict, inpath: str = "", options: List[str] = None):
    """Write the given tags to every media file below inpath (default: cwd)."""
    if not options:
        options = []
    log_function_call_debug(write_exiftags.__name__, tagDict, inpath, options)
    if not inpath:
        inpath = os.getcwd()
    clock = Clock()
    for dirpath, dirnames, filenames in os.walk(inpath):
        # stop after the top level unless subdirectories are included
        if not settings.includeSubdirs and inpath != dirpath:
            break
        n = count_files(filenames, settings.image_types + settings.video_types)
        if n == 0:
            log().info(" No matching files in %s", os.path.relpath(dirpath, inpath))
            continue
        call_exiftool(dirpath, "*", options + tag_dict_to_options(tagDict), True)
        log().info("%4d tags written in %s", n, os.path.relpath(dirpath, inpath))
    clock.finish()
def replace_in_file(search: str, replace: str, fileext: str):
    """
    replace search with replace in files ending with fileext

    :param search: string to search for
    :param replace: string to replace
    :param fileext: type of file to search in
    """
    inpath = os.getcwd()
    log().info(inpath)
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        for filename in filenames:
            if not filename.endswith(fileext):
                continue
            log().info(filename)
            fullfilename = os.path.join(dirpath, filename)
            # explicit utf-8: matches rename_in_csvs and avoids
            # platform-dependent default encodings
            with open(fullfilename, 'r', encoding="utf-8") as file:
                content = file.read()
            content = content.replace(search, replace)
            with open(fullfilename, 'w', encoding="utf-8") as file:
                file.write(content)
def is_invalid_path(dirpath: str, blacklist: List[str] = None, whitelist: List[str] = None, regex: str = r"", start: str = "") -> bool:
    """Decide whether a directory should be skipped by the os.walk loops.

    :param dirpath: directory to check
    :param blacklist: skip if the basename (up to the first '-', '_' or ' ') matches an entry
    :param whitelist: skip if the basename is not one of these names
    :param regex: skip if the basename does not match this regex
    :param start: skip directories sorting (case-insensitively) before this relative path
    :return: True if the directory should be skipped
    """
    inpath = os.getcwd()
    basename = os.path.basename(dirpath)
    # ignore what comes after special chars for exact match:
    # HDR-E-Nb would still be a match with HDR but HDR2 would be no match
    # (re.split, not str.split: the original passed the pattern as a literal
    # string, so this truncation never actually happened)
    basename_split = re.split(r"[-_ ]", basename)[0]
    relpath = str(os.path.relpath(dirpath, inpath))
    dirnames = relpath.split(os.sep)
    if not settings.includeSubdirs and not inpath == dirpath:
        return True
    # hidden directories anywhere on the relative path ('.' itself is allowed)
    if any(len(dirname) > 1 and dirname.startswith('.') for dirname in dirnames):
        return True
    if '.EXIFnaming' in dirpath:
        return True
    if '.data' in dirpath:
        return True
    if blacklist and any(basename_split == blacklistEntry for blacklistEntry in blacklist):
        return True
    if whitelist and not basename in whitelist:
        return True
    if regex and not re.search(regex, basename):
        return True
    if start and relpath.lower() < start.lower():
        return True
    log().info(dirpath)
    return False
def location_to_keywords():
    """
    import location exif information to put into Keywords

    deprecated: try to use write_exif_using_csv() instead
    """
    inpath = os.getcwd()
    log().info("process %d Files in %s, subdir: %r", count_files_in(inpath, settings.image_types + settings.video_types, ""), inpath, settings.includeSubdirs)
    askToContinue()
    Tagdict = read_exiftags(inpath, ask=False)
    # all value lists in Tagdict are parallel; any one of them gives the file count
    leng = len(list(Tagdict.values())[0])
    for i in range(leng):
        dirpath = Tagdict["Directory"][i]
        filename = Tagdict["File Name"][i]
        image_tags = Tagdict["Keywords"][i].split(', ')
        # Subject gets its own list object so the two entries are independent
        outTagDict = {'Keywords': image_tags, 'Subject': list(image_tags)}
        location = Location(Tagdict, i)
        add_dict(outTagDict, location.to_tag_dict())
        write_exiftag(outTagDict, dirpath, filename)
def listsOfDicts_to_dictOfLists(listOfDicts: List[dict], ask=False) -> Dict[str, list]:
    """Transpose a list of per-file tag dicts into one dict of parallel lists.

    Keys missing in some dicts are padded with "" so all lists keep equal
    length; the essential keys must be present in every dict or an
    AssertionError is raised.

    :type listOfDicts: list
    :param ask: whether to ask for continue when keys not occur
    """
    essential = ["File Name", "Directory", "File Modification Date/Time"]
    if not listOfDicts or not listOfDicts[0] or not listOfDicts[0].keys():
        return OrderedDict()
    if has_not_keys(listOfDicts[0], essential):
        return OrderedDict()
    # seed the output with the keys and values of the first dict
    DictOfLists = OrderedDict()
    for key in listOfDicts[0]:
        val = listOfDicts[0][key]
        DictOfLists[key] = [val]
    # badkeys records keys that did not occur everywhere: for a late-appearing
    # key the index of its first occurrence, for a missing key a miss counter
    badkeys = OrderedDict()
    for i, subdict in enumerate(listOfDicts[1:], start=1):
        for key in subdict:
            if not key in DictOfLists:
                # key appeared for the first time at index i: backfill with ""
                badkeys[key] = i
                DictOfLists[key] = [""] * i
            DictOfLists[key].append(subdict[key])
        for key in DictOfLists:
            if not key in subdict:
                # known key missing in this dict: pad with ""
                if not key in badkeys:
                    badkeys[key] = 0
                badkeys[key] += 1
                DictOfLists[key].append("")
    if badkeys:
        for key in essential:
            if key in badkeys:
                raise AssertionError(key + ' is essential but not in one of the files')
        if ask:
            log().warning("Following keys did not occur in every file. Number of not occurrences is listed in following dictionary: %r", badkeys)
            askToContinue()
    return DictOfLists
def call_exiftool_direct(options: List[str] = None, override=True) -> (str, List[str]):
    """Invoke the exiftool executable with the given argument list.

    :param options: full exiftool argument list, targets included
    :param override: if True (and options given), files are overwritten in place
    :return: tuple of (stdout text, list of stderr lines); stderr lines are also logged
    """
    if not options:
        options = []
    log_function_call_debug(call_exiftool_direct.__name__, options, override)
    path = getExiftoolPath()
    # force a consistent encoding for both tag values and file names
    encoding_args = ["-charset", settings.encoding_format, "-charset", "FileName=" + settings.encoding_format]
    args = [path + "exiftool"] + encoding_args + options
    if override and options:
        args.append("-overwrite_original_in_place")
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (out, err) = proc.communicate()
    out = out.decode(settings.encoding_format)
    errorLines = _decode_error(err)
    for line in errorLines:
        log().warning(line)
    return out, errorLines
def order():
    """
    order by date using exif info
    """
    log_function_call(order.__name__)
    inpath = os.getcwd()
    Tagdict = read_exiftags(file_types=settings.image_types, ignore_model=True)
    timeJumpDetector = TimeJumpDetector()
    time_old = giveDatetime()
    dircounter = 1
    filenames = []
    leng = len(list(Tagdict.values())[0])
    # remember the first and last picture time of every created folder,
    # used later to sort videos into the matching folders
    dirNameDict_firsttime = OrderedDict()
    dirNameDict_lasttime = OrderedDict()
    time = giveDatetime(create_model(Tagdict, 0).get_date())
    daystring = dateformating(time, "YYMMDD_")
    dirName = daystring + "%02d" % dircounter
    dirNameDict_firsttime[time] = dirName
    log().info('Number of JPG: %d', leng)
    for i in range(leng):
        model = create_model(Tagdict, i)
        time = giveDatetime(model.get_date())
        # a time jump closes the current folder: flush collected files, start a new one
        if timeJumpDetector.isJump(time, len(filenames)):
            dirNameDict_lasttime[time_old] = dirName
            moveFiles(filenames, os.path.join(inpath, dirName))
            filenames = []
            if newdate(time, time_old):
                # new day: restart the per-day folder counter
                daystring = dateformating(time, "YYMMDD_")
                dircounter = 1
            else:
                dircounter += 1
            dirName = daystring + "%02d" % dircounter
            dirNameDict_firsttime[time] = dirName
        filenames.append((model.dir, model.filename))
        time_old = time
    # flush the remaining files into the last folder
    dirNameDict_lasttime[time_old] = dirName
    moveFiles(filenames, os.path.join(inpath, dirName))
    print_firstlast_of_dirname(dirNameDict_firsttime, dirNameDict_lasttime)
    # distribute videos into mp4 subfolders of the folder closest in time
    Tagdict_mp4 = read_exiftags(file_types=settings.video_types)
    if len(Tagdict_mp4) == 0:
        return
    leng = len(list(Tagdict_mp4.values())[0])
    log().info('Number of mp4: %d', leng)
    for i in range(leng):
        model = create_model(Tagdict_mp4, i)
        time = giveDatetime(model.get_date())
        dirName = find_dir_with_closest_time(dirNameDict_firsttime, dirNameDict_lasttime, time)
        if dirName:
            move(model.filename, model.dir, os.path.join(inpath, dirName, "mp4"))
        else:
            log().warning("Did not move %s to %s", model.filename, dirName)
def rename_HDR(mode="HDRT", folder=r"HDR\w*"):
    """
    rename HDR pictures generated by FRANZIS HDR projects to a nicer form

    :param mode: name for HDR-Mode written to file
    :param folder: only files in folders of this name are renamed
    """
    log_function_call(rename_HDR.__name__, mode, folder)
    matchreg = r"^([-\w]+_[0-9]+)B\d(.*)_(?:\d+B)?\d\2"
    cwd = os.getcwd()
    for dirpath, dirnames, filenames in os.walk(cwd):
        if is_invalid_path(dirpath, regex=folder):
            continue
        log().info("Folder: %s", dirpath)
        for entry in filenames:
            # already renamed files carry the mode string and are skipped
            if mode in entry:
                continue
            match = re.search(matchreg, entry)
            if match:
                _rename_match(dirpath, entry, mode, match)
            else:
                log().info("no match: %s", entry)
        # directories are renamed too, but mismatches are not logged
        for entry in dirnames:
            match = re.search(matchreg, entry)
            if match:
                _rename_match(dirpath, entry, mode, match)
def filter_primary():
    """put single and B1 in same directory"""
    log_function_call(filter_primary.__name__)
    cwd = os.getcwd()
    skip = ["S", "SM", "TL", "mp4", "HDR", "single", "PANO", "others"]
    skip += [model for model in c.CameraModelShort.values() if model]
    log().info(cwd)
    # flatten the bracket-series folders back first
    folders_to_main(dirs=["B" + str(i) for i in range(1, 8)])
    for dirpath, dirnames, filenames in os.walk(cwd):
        if is_invalid_path(dirpath, skip):
            continue
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames), len(filenames))
        remaining = filenames
        for series_type in ("S", "SM", "TL"):
            remaining = moveSeries(dirpath, remaining, series_type)
        remaining = move_media(dirpath, remaining, settings.video_types, "mp4")
        remaining = move_media(dirpath, remaining, ["HDR"], "HDR")
        # B1 of each bracket series counts as primary; the rest stays in B
        remaining = moveSeries(dirpath, remaining, "B", "1", "primary")
        remaining = moveSeries(dirpath, remaining, "B")
        move_media(dirpath, remaining, settings.image_types, "primary")
def order_with_timetable(timefile: str = None):
    """
    use timetable to create folder structure

    :param timefile: timetable file
    :return:
    """
    if not timefile:
        timefile = get_info_dir("timetable.txt")
    log_function_call(order_with_timetable.__name__, timefile)
    dirNameDict_firsttime, dirNameDict_lasttime = _read_timetable(timefile)
    Tagdict = read_exiftags()
    number_of_files = len(list(Tagdict.values())[0])
    log().info('Number of jpg: %d', number_of_files)
    for i in range(number_of_files):
        model = create_model(Tagdict, i)
        time = giveDatetime(model.get_date())
        # pick the folder whose time range best contains this picture
        dirName = find_dir_with_closest_time(dirNameDict_firsttime, dirNameDict_lasttime, time)
        if dirName:
            move(model.filename, model.dir, os.path.join(os.getcwd(), dirName))
def name_to_exif(folder=r"", additional_tags=(), startdir=None):
    """
    extract title, description and mode from name and write them to exif

    deprecated: try to use write_exif_using_csv() instead
    """
    cwd = os.getcwd()
    clock = Clock()
    file_types = settings.image_types + settings.video_types
    log().info("process %d Files in %s, subdir: %r",
               count_files_in(cwd, file_types, ""), cwd, settings.includeSubdirs)
    askToContinue()
    for dirpath, dirnames, filenames in os.walk(cwd):
        if is_invalid_path(dirpath, regex=folder):
            continue
        for filename in filterFiles(filenames, file_types):
            meta_data = FileMetaData(dirpath, filename)
            # full-path import resolves tags from the directory structure too
            if startdir:
                meta_data.import_fullname(startdir)
            else:
                meta_data.import_filename()
            meta_data.update({'tags': additional_tags})
            write_exiftag(meta_data.to_tag_dict(), dirpath, filename)
    clock.finish()