def perform_for_photoset(self, photoset: Photoset, args: Namespace, world: World, group: Group) -> None:
    """Move the photoset into its primary archive destination (and category, if any)."""
    archive = world.archive
    multiplexer = Multiplexer(photoset.parts)

    # The destination whose tree holds the most content wins.
    destination_tree = self.__get_biggest_tree(
        [multiplexer.justin, multiplexer.photoclub, multiplexer.closed])
    destination_name = destination_tree.name

    destination = archive.get_destination(destination_name)
    assert destination is not None

    final_path = archive.path / destination_name
    if destination.has_categories:
        # Inside a categorised destination, pick the largest category subtree.
        final_path /= self.__get_biggest_tree(destination_tree.subtrees).name

    print(
        f"Moving {photoset.name} to {final_path.relative_to(archive.path)}"
    )
    photoset.move(path=final_path)
    archive.refresh()
def perform(self, args: Namespace, world: World, group: Group) -> None:
    """Run the per-photoset action for every path matching the name pattern.

    Fixes two defects of the previous version: the pattern is resolved only
    once (a generator would have been partially consumed by ``any``), and the
    method now returns after reporting that nothing matched instead of
    falling through to the loop.
    """
    paths = list(util.resolve_patterns(args.name))
    if not paths:
        print("No items found for that pattern.")
        return  # was missing: previously fell through to the (empty) loop

    for path in paths:
        photoset = Photoset(FolderTree(path))
        self.perform_for_photoset(photoset, args, world, group)
def is_good(self, photoset: Photoset) -> bool:
    """Return True when the photoset's metafile state passes all consistency checks."""
    if not MetafileStateCheck.__metafile_required(photoset):
        # Sets that don't need a metafile are trivially good.
        return True

    if not photoset.has_metafile():
        return False

    metafile = photoset.get_metafile()
    if MetafileStateCheck.__metafile_has_no_group_entries(metafile):
        return False

    # Every group entry must have post entries, all published, covering
    # every folder of the photoset.
    return all(
        not MetafileStateCheck.__group_entry_has_no_post_entries(entries)
        and not MetafileStateCheck.__metafile_has_not_published_entries(entries)
        and not MetafileStateCheck.__photoset_has_folders_not_in_metafile(photoset, entries)
        for entries in metafile.posts.values()
    )
def perform(self, args: Namespace, world: World, group: Group) -> None:
    """Move every photoset matching the pattern to a user-selected location."""
    paths = list(util.resolve_patterns(args.name))
    if not paths:
        return

    # Candidate targets are all known locations except the current one
    # (determined from the first matched path).
    current_location = world.location_of_path(paths[0])
    candidates = [loc for loc in world.all_locations if loc != current_location]

    if not candidates:
        print("Current location is the only available.")
        # number of locations is global, photoset may have only one location -> other's can't be moved too
        return

    if len(candidates) == 1:
        selected_location = candidates[0]
    else:
        selected_location = util.ask_for_choice(
            f"Where would you like to move {paths[0].name}?", candidates)

    for path in paths:
        from_location = world.location_of_path(path)
        photoset = Photoset(FolderTree(path))
        try:
            ChecksRunner.instance().run(photoset, self.__prechecks)
            # Preserve the set's position relative to its old location root.
            new_path = selected_location / photoset.path.parent.relative_to(from_location)
            photoset.move(new_path)
        except CheckFailedError as error:
            print(f"Unable to move {photoset.name}: {error}")
def __collect_photosets(folder_root: FolderTree, structure_root: Structure) -> Iterable[Photoset]:
    """Recursively gather photosets from the folder tree, guided by the structure description."""
    if folder_root is None:
        # Structure entry has no matching folder on disk.
        return []

    collected = []

    # Descend into each declared substructure by folder name.
    for substructure in structure_root.substructures:
        collected.extend(Disk.__collect_photosets(folder_root[substructure.name], substructure))

    # Folders not covered by explicit substructures are photosets themselves.
    if structure_root.has_implicit_sets:
        collected.extend(Photoset(subtree) for subtree in folder_root.subtrees)

    return collected
def backwards(self, photoset: Photoset):
    """Undo the extraction: merge each part's filter folder back into the part."""
    for part in photoset.parts:
        if not self.__run_prechecks(part):
            raise ExtractorError(
                f"Failed prechecks while running {self.__name} extractor backwards on {part.name}"
            )

        filtered = part.tree[self.__filter_folder]
        if not filtered:
            # This part has no filter folder; nothing to undo.
            continue

        if not self.__run_prechecks(Photoset(filtered)):
            raise ExtractorError(
                f"Failed prechecks while running {self.__name} extractor backwards on {part.name}/"
                f"{self.__filter_folder}")

        # Flatten the filtered subtree and shift its files one level up.
        RelativeFileset(filtered.path, filtered.flatten()).move_up()

    photoset.tree.refresh()
def perform(self, args: Namespace, world: World, group: Group) -> None:
    """Check every photoset in the staging tree for readiness to be published."""
    staged = self.tree_with_sets(world)
    sets = [Photoset(tree) for tree in staged.subtrees]
    self.__check_for_publishing(sets, world, group)
def perform(self, args: Namespace, world: World, group: Group) -> None:
    """Schedule every not-yet-uploaded photoset part as a timed post in the group.

    Walks the staged photosets, finds parts without a post entry for this
    group, uploads their attachments, creates scheduled posts at dates taken
    from the date generator, and records every created post in the photoset's
    metafile as it goes.
    """
    stage_tree = self.tree_with_sets(world)
    photosets = [Photoset(subtree) for subtree in stage_tree.subtrees]
    scheduled_posts = group.get_scheduled_posts()
    # Continue the schedule after the latest already-scheduled post.
    last_date = ScheduleAction.__get_start_date(scheduled_posts)
    date_generator = ScheduleAction.__date_generator(last_date)
    print("Performing scheduling... ", end="")
    # photoset -> {hashtag -> [parts]} for content not uploaded to this group yet.
    upload_hierarchy = ScheduleAction.__get_not_uploaded_hierarchy(photosets, group.url)
    if len(upload_hierarchy) > 0:
        print()
    else:
        print("already done.")
        return
    for photoset, hashtags in upload_hierarchy.items():
        print(f"Uploading photoset {photoset.name}")
        photoset_metafile = photoset.get_metafile()
        for hashtag, parts in hashtags.items():
            print(f"Uploading #{hashtag}")
            for part in parts:
                part_path = part.path.relative_to(photoset.path)
                print(f"Uploading contents of {part_path}... ", end="", flush=True)
                # Parts are expected to be leaf folders (files only, no subtrees).
                assert len(part.subtrees) == 0
                photo_files = part.files
                if hashtag != "report":
                    attachments = self.get_simple_attachments(group, photo_files)
                else:
                    # "report" parts get dedicated attachment handling.
                    attachments = self.get_report_attachments(group, photoset.name, part)
                post_datetime = next(date_generator)
                post = Post(
                    text=f"#{hashtag}@{group.url}",
                    attachments=attachments,
                    date=post_datetime
                )
                post_id = group.add_post(post)
                post_metafile = PostMetafile(
                    path=part_path,
                    post_id=post_id,
                    status=PostStatus.SCHEDULED
                )
                # NOTE(review): assumes posts already has an entry for group.url
                # (e.g. a defaultdict) — confirm, otherwise this raises KeyError.
                photoset_metafile.posts[group.url].append(post_metafile)
                # Save after every post so progress survives interruption.
                photoset.save_metafile(photoset_metafile)
                print(f"successful, new post has id {post_id}")
def perform_for_photoset(self, photoset: Photoset, args: Namespace, world: World, group: Group) -> None:
    """Interactively bind folders without a post entry to existing group posts.

    For every part under the photoset's justin folder that has no recorded
    post, asks the user to bind it to an existing post id, skip it ("-"), or
    open the folder for inspection (empty answer). Each binding is saved to
    the metafile immediately.

    Fix: ``local_post_ids`` is now updated after every binding, so the
    "already associated" guard also rejects ids bound earlier in the same
    session (previously the set was stale and one post could be bound to
    two different folders).
    """
    justin_folder = photoset.justin
    photoset_metafile = photoset.get_metafile()
    local_post_info = photoset_metafile.posts[group.url]
    posted_paths = [post.path for post in local_post_info]
    local_post_ids = {post.post_id for post in local_post_info}
    parts_to_upload = []
    print(f"Fixing metafile for {photoset.name} photoset.")
    # Collect parts whose path is not yet recorded in the metafile.
    for hashtag in justin_folder.subtrees:
        parts = PartingHelper.folder_tree_parts(hashtag)
        for part in parts:
            part_path = part.path.relative_to(photoset.path)
            if part_path not in posted_paths:
                parts_to_upload.append(part)
    posts = group.get_posts()
    posts_id_mapping = {post.id: post for post in posts}
    for part in parts_to_upload:
        part_path = part.path.relative_to(photoset.path)
        while True:  # handling post loop
            while True:  # ask loop
                answer = input(
                    f"You have folder \"{part_path}\" without corresponding post. What would you like?\n"
                    f"* Enter a number - bind to existing post\n"
                    f"* Enter a \"-\" symbol - leave it as is\n"
                    f"* Just press Enter - open folder\n"
                    f"> ")
                answer = answer.strip()
                if answer != "":
                    break
                # Empty answer: open the folder so the user can look inside.
                fs.open_file_manager(part.path)
            if answer == "-":
                # Leave this folder unbound.
                break
            elif answer.isdecimal():
                post_id = int(answer)
                if post_id in local_post_ids:
                    print("This post is already associated with other path")
                    continue
                if post_id not in posts_id_mapping:
                    print("There is no such post")
                    continue
                post_metafile = PostMetafile(part_path, post_id, PostStatus.PUBLISHED)
                local_post_info.append(post_metafile)
                # Track the id so it can't be bound to another folder this session.
                local_post_ids.add(post_id)
                photoset_metafile.posts[group.url] = local_post_info
                # Persist each binding immediately.
                photoset.save_metafile(photoset_metafile)
                break
def transfer(self, photoset: Photoset):
    """Move the photoset into the configured subpath next to its current parent."""
    destination = photoset.path.parent / self.__path
    photoset.move(destination)
def perform(self, args: Namespace, world: World, group: Group) -> None:
    """Synchronize local post metafiles with the group's actual web state.

    For each recorded post: a SCHEDULED entry may still be scheduled, may have
    been published (its timer id is replaced by the final post id), or may
    have been deleted; a PUBLISHED entry may still exist or have been deleted.
    Deleted posts are dropped from the metafile.
    """
    stage_tree = self.tree_with_sets(world)
    metasets = [Photoset(subtree) for subtree in stage_tree.subtrees]
    scheduled_posts = group.get_scheduled_posts()
    published_posts = group.get_posts()
    scheduled_ids = [post.id for post in scheduled_posts]
    published_ids = [post.id for post in published_posts]
    # A scheduled post keeps its timer id after going live; map timer id -> final id.
    published_timed_ids = [post.timer_id for post in published_posts]
    published_mapping = dict(zip(published_timed_ids, published_ids))
    print("Performing sync of local state with web...")
    for metaset in metasets:
        for photoset in metaset.parts:
            photoset_metafile = photoset.get_metafile()
            existing_posts = []  # entries that still correspond to real posts
            for post_metafile in photoset_metafile.posts[group.url]:
                post_id = post_metafile.post_id
                print(f"Syncing post with id {post_id}... ", end="")
                if post_metafile.status is PostStatus.SCHEDULED:
                    if post_id in scheduled_ids:
                        print("still scheduled")
                        existing_posts.append(post_metafile)
                    elif post_id in published_timed_ids:
                        # The scheduled post went live: switch to its published id.
                        post_metafile.status = PostStatus.PUBLISHED
                        post_metafile.post_id = published_mapping[post_id]
                        print(
                            f"was published, now has id {post_metafile.post_id}"
                        )
                        existing_posts.append(post_metafile)
                    elif post_id in published_ids:
                        # scheduled id can't become an id for published post
                        print("somehow ended in posted array, aborting...")
                        assert False
                    else:
                        print("was deleted")
                elif post_metafile.status is PostStatus.PUBLISHED:
                    # A published id must never reappear in the scheduled pools.
                    assert post_id not in scheduled_ids
                    assert post_id not in published_timed_ids
                    if post_id in published_ids:
                        print("still published")
                        existing_posts.append(post_metafile)
                    else:
                        print("was deleted")
            # Keep only surviving entries and persist per photoset.
            photoset_metafile.posts[group.url] = existing_posts
            photoset.save_metafile(photoset_metafile)
    print("Performed successfully")