def order():
    """ order by date using exif info

    Walks the EXIF tags of all images in the current working directory,
    groups them into day folders named "YYMMDD_NN" (NN = per-day counter,
    incremented on detected time jumps within the same day), moves the
    images there, and finally moves videos into an "mp4" subfolder of the
    day folder whose time range is closest to the video's timestamp.
    """
    log_function_call(order.__name__)
    inpath = os.getcwd()
    Tagdict = read_exiftags(file_types=settings.image_types, ignore_model=True)
    timeJumpDetector = TimeJumpDetector()
    time_old = giveDatetime()
    dircounter = 1
    filenames = []
    # number of files = length of any tag column (all columns are parallel lists)
    leng = len(list(Tagdict.values())[0])
    # maps: first/last timestamp seen in a folder -> folder name
    dirNameDict_firsttime = OrderedDict()
    dirNameDict_lasttime = OrderedDict()
    time = giveDatetime(create_model(Tagdict, 0).get_date())
    daystring = dateformating(time, "YYMMDD_")
    dirName = daystring + "%02d" % dircounter
    dirNameDict_firsttime[time] = dirName
    log().info('Number of JPG: %d', leng)
    for i in range(leng):
        model = create_model(Tagdict, i)
        time = giveDatetime(model.get_date())
        # a time jump closes the current folder and starts a new one
        if timeJumpDetector.isJump(time, len(filenames)):
            dirNameDict_lasttime[time_old] = dirName
            moveFiles(filenames, os.path.join(inpath, dirName))
            filenames = []
            if newdate(time, time_old):
                # new day: restart counter with a fresh date prefix
                daystring = dateformating(time, "YYMMDD_")
                dircounter = 1
            else:
                # same day: continue numbering
                dircounter += 1
            dirName = daystring + "%02d" % dircounter
            dirNameDict_firsttime[time] = dirName
        filenames.append((model.dir, model.filename))
        time_old = time
    # flush the final folder
    dirNameDict_lasttime[time_old] = dirName
    moveFiles(filenames, os.path.join(inpath, dirName))
    print_firstlast_of_dirname(dirNameDict_firsttime, dirNameDict_lasttime)
    # second pass: place videos next to the images taken around the same time
    Tagdict_mp4 = read_exiftags(file_types=settings.video_types)
    if len(Tagdict_mp4) == 0:
        return
    leng = len(list(Tagdict_mp4.values())[0])
    log().info('Number of mp4: %d', leng)
    for i in range(leng):
        model = create_model(Tagdict_mp4, i)
        time = giveDatetime(model.get_date())
        dirName = find_dir_with_closest_time(dirNameDict_firsttime, dirNameDict_lasttime, time)
        if dirName:
            move(model.filename, model.dir, os.path.join(inpath, dirName, "mp4"))
        else:
            log().warning("Did not move %s to %s", model.filename, dirName)
def folders_to_main(series: bool = False, primary: bool = False, blurry: bool = False, dirs: list = None,
                    one_level: bool = True, not_inpath: bool = True):
    """ reverses filtering/sorting into directories

    :param series: restrict to reverse of filterSeries
    :param primary: restrict to reverse of filterPrimary
    :param blurry: restrict to reverse of detectBlurry
    :param dirs: restrict to reverse other dirs
    :param one_level: reverse only one directory up
    :param not_inpath: leave all directories in inpath as they are, only change subdirectories
    """
    log_function_call(folders_to_main.__name__, series, primary, blurry, dirs, one_level, not_inpath)
    inpath = os.getcwd()

    # collect the set of directory names whose contents should be lifted out
    reverseDirs = []
    if series:
        reverseDirs += ["B" + str(i) for i in range(1, 8)] + ["S", "single"]
    if primary:
        reverseDirs += ["B", "S", "TL", "SM", "primary"]
    if blurry:
        reverseDirs += ["blurry"]
    if dirs:
        reverseDirs += list(dirs)

    # first pass: only measure how deep the affected tree is
    deepest = 0
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        skip = (not_inpath and dirpath == inpath) or is_invalid_path(dirpath, whitelist=reverseDirs)
        if skip:
            continue
        deepest = max(deepest, get_relpath_depth(dirpath, inpath))
    if not_inpath:
        deepest -= 1

    # warn (and ask) before flattening anything non-trivial
    if not reverseDirs and deepest > 1:
        log().warning("A folder structure with a depth of %2d will be flattened", deepest)
        askToContinue()
    elif deepest > 3:
        log().warning("The folder structure has a depth of %2d", deepest)
        log().info("chosen directory names: %r", reverseDirs)
        askToContinue()

    # second pass: actually move media files up and drop emptied directories
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        skip = (not_inpath and dirpath == inpath) or is_invalid_path(dirpath, whitelist=reverseDirs)
        if skip:
            continue
        destination = os.path.dirname(dirpath) if one_level else inpath
        log().info("%s #dirs:%d #files:%d", dirpath, len(dirnames), len(filenames))
        for filename in filenames:
            if file_has_ext(filename, settings.image_types + settings.video_types):
                move(filename, dirpath, destination)
        removeIfEmtpy(dirpath)
def move_each_pretag_to_folder():
    """Sort files into subdirectories derived from their filenames.

    For every file under the current working directory, a
    :class:`FilenameAccessor` is used to read the filename prefix (``pre``)
    and the primary tags (``primtags``). A file is moved into a subfolder
    named after its prefix when the prefix is not already part of its
    directory path, and likewise into nested folders named after its
    primary tags.
    """
    log_function_call(move_each_pretag_to_folder.__name__)
    inpath = os.getcwd()
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath):
            continue
        for filename in filenames:
            filenameAccessor = FilenameAccessor(filename)
            if filenameAccessor.pre not in dirpath:
                move(filename, dirpath, os.path.join(dirpath, filenameAccessor.pre))
            # NOTE(review): if the previous move fired, the file is no longer in
            # dirpath, so this second move would act on a stale location — confirm
            # that `move` tolerates a missing source or that both never trigger.
            if filenameAccessor.primtags and filenameAccessor.primtags[0] not in dirpath:
                move(filename, dirpath, os.path.join(dirpath, *filenameAccessor.primtags))
def order_with_timetable(timefile: str = None):
    """ use timetable to create folder structure

    Reads first/last timestamps per folder from the timetable file and moves
    each tagged image into the folder whose time range lies closest to the
    image's EXIF date.

    :param timefile: timetable file; defaults to the "timetable.txt" info file
    :return:
    """
    if not timefile:
        timefile = get_info_dir("timetable.txt")
    log_function_call(order_with_timetable.__name__, timefile)
    dirNameDict_firsttime, dirNameDict_lasttime = _read_timetable(timefile)
    Tagdict = read_exiftags()
    number_of_files = len(list(Tagdict.values())[0])
    log().info('Number of jpg: %d', number_of_files)
    for index in range(number_of_files):
        entry = create_model(Tagdict, index)
        timestamp = giveDatetime(entry.get_date())
        target_dir = find_dir_with_closest_time(dirNameDict_firsttime, dirNameDict_lasttime, timestamp)
        if not target_dir:
            continue
        move(entry.filename, entry.dir, os.path.join(os.getcwd(), target_dir))
def find_bad_exif(do_move=True, check_date_additional=False, folder: str = r""):
    """ find files with missing exif data

    Scans images below the current working directory and records, in CSV
    files inside the info directory, files that lack tag metadata
    (Keywords/Subject/Description/User Comment), lack "Date/Time Original",
    or (optionally) carry additional date fields. Optionally moves offending
    files into mirrored "bad_exif_*" directory trees.

    :param do_move: if True, move offending files into bad_exif_* folders
    :param check_date_additional: also flag files with extra date fields
    :param folder: regex restricting which directories are scanned
    """
    log_function_call(find_bad_exif.__name__, do_move)
    clock = Clock()
    inpath = os.getcwd()
    # OrderedSet keeps first-seen order while deduplicating (dirname, name_part) rows
    lines_no_tags = OrderedSet()
    lines_bad_date_additional = OrderedSet()
    lines_date_missing = OrderedSet()
    out_filename_no_tags = get_info_dir("no_tags.csv")
    file_no_tags, writer_no_tags = fileop.create_csv_writer(
        out_filename_no_tags, ["directory", "name_part"])
    out_filename_bad_date_additional = get_info_dir("bad_date_additional.csv")
    file_bad_date_additional, writer_bad_date_additional = fileop.create_csv_writer(
        out_filename_bad_date_additional, ["directory", "name_part"])
    out_filename_date_missing = get_info_dir("date_missing.csv")
    file_date_missing, writer_date_missing = fileop.create_csv_writer(
        out_filename_date_missing, ["directory", "name_part"])
    for (dirpath, dirnames, filenames) in os.walk(inpath):
        if is_invalid_path(dirpath, regex=folder):
            continue
        # skip directories without images before spending time on exiftool
        if fileop.count_files(filenames, settings.image_types) == 0:
            continue
        Tagdict = read_exiftags(dirpath, settings.image_types, ask=False)
        if len(list(Tagdict.values())) == 0:
            continue
        leng = len(list(Tagdict.values())[0])
        for i in range(leng):
            # flag files where any of the descriptive tag fields is absent or empty
            if (not "Keywords" in Tagdict or not Tagdict["Keywords"][i]) or \
                    (not "Subject" in Tagdict or not Tagdict["Subject"][i]) or \
                    (not "Description" in Tagdict or not Tagdict["Description"][i]) or \
                    (not "User Comment" in Tagdict or not Tagdict["User Comment"][i]):
                lines_no_tags.add((os.path.basename(dirpath), _remove_counter(Tagdict["File Name"][i])))
                # "bad_exif" substring check avoids re-moving already-quarantined files
                if do_move and not "bad_exif" in dirpath:
                    move(
                        Tagdict["File Name"][i], dirpath,
                        dirpath.replace(
                            inpath, os.path.join(inpath, "bad_exif_keywords")))
            # flag files without the primary EXIF timestamp
            if not "Date/Time Original" in Tagdict or not Tagdict[
                    "Date/Time Original"][i]:
                lines_date_missing.add(
                    (os.path.basename(dirpath), _remove_counter(Tagdict["File Name"][i])))
                if do_move and not "bad_exif" in dirpath:
                    move(
                        Tagdict["File Name"][i], dirpath,
                        dirpath.replace(
                            inpath, os.path.join(inpath, "bad_exif_date_missing")))
            # optionally flag files carrying any of the secondary date fields
            if check_date_additional and \
                    (("Date Created" in Tagdict and Tagdict["Date Created"][i]) or
                     ("Time Created" in Tagdict and Tagdict["Time Created"][i]) or
                     ("Create Date" in Tagdict and Tagdict["Create Date"][i]) or
                     ("Modify Date" in Tagdict and Tagdict["Modify Date"][i]) or
                     ("Digital Creation Date" in Tagdict and Tagdict["Digital Creation Date"][i])):
                lines_bad_date_additional.add(
                    (os.path.basename(dirpath), _remove_counter(Tagdict["File Name"][i])))
                if do_move and not "bad_exif" in dirpath:
                    move(
                        Tagdict["File Name"][i], dirpath,
                        dirpath.replace(
                            inpath, os.path.join(inpath, "bad_exif_date_additional")))
    writer_no_tags.writerows(lines_no_tags)
    writer_bad_date_additional.writerows(lines_bad_date_additional)
    writer_date_missing.writerows(lines_date_missing)
    file_no_tags.close()
    file_bad_date_additional.close()
    file_date_missing.close()
    clock.finish()