Example No. 1
    def perform(self, args: Namespace, world: World, group: Group) -> None:
        paths = list(util.resolve_patterns(args.name))

        if not paths:
            print("No items found for that pattern.")

            return

        for path in paths:
            photoset = Photoset(FolderTree(path))

            self.perform_for_photoset(photoset, args, world, group)
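Several of these actions rely on util.resolve_patterns to expand the name argument into concrete paths, but the helper itself is not shown. A minimal sketch of what it might look like, assuming it simply wraps glob and yields existing directories (a hypothetical stand-in, not the project's real implementation):

    from glob import glob
    from pathlib import Path
    from typing import Iterator


    def resolve_patterns(pattern: str) -> Iterator[Path]:
        # Hypothetical stand-in: expand a shell-style pattern into the existing
        # directories it matches, in a stable order.
        for match in sorted(glob(pattern)):
            path = Path(match)

            if path.is_dir():
                yield path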
Example No. 2
    def __collect_photosets(folder_root: FolderTree,
                            structure_root: Structure) -> Iterable[Photoset]:
        result = []

        # The declared structure node has no matching folder on disk: nothing to collect.
        if folder_root is None:
            return result

        # Descend into every declared substructure, matching it against the folder of the same name.
        for substructure in structure_root.substructures:
            result += Disk.__collect_photosets(folder_root[substructure.name], substructure)

        # Nodes with implicit sets treat each remaining subtree as a photoset of its own.
        if structure_root.has_implicit_sets:
            photosets = [Photoset(subtree) for subtree in folder_root.subtrees]

            result += photosets

        return result
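The recursion above touches only a small surface of FolderTree and Structure: looking a child up by name, enumerating subtrees, and reading a structure node's substructures and has_implicit_sets flag. A self-contained pair of stand-ins exposing just that interface (purely illustrative, not the project's real classes):

    from dataclasses import dataclass, field
    from typing import List, Optional


    @dataclass
    class FakeStructure:
        # Hypothetical stand-in for Structure: a named node with child nodes and a flag.
        name: str
        substructures: List["FakeStructure"] = field(default_factory=list)
        has_implicit_sets: bool = False


    @dataclass
    class FakeTree:
        # Hypothetical stand-in for FolderTree: a named folder with child folders.
        name: str
        subtrees: List["FakeTree"] = field(default_factory=list)

        def __getitem__(self, name: str) -> Optional["FakeTree"]:
            # Mirror the lookup used above: a missing child resolves to None.
            return next((tree for tree in self.subtrees if tree.name == name), None)

With stand-ins like these, the walk only descends into folders that actually exist on disk, and any branch that is declared but missing contributes nothing to the result.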
Example No. 3
    def perform(self, args: Namespace, world: World, group: Group) -> None:
        all_locations = world.all_locations

        paths = list(util.resolve_patterns(args.name))

        if not paths:
            return

        path = paths[0]

        path_location = world.location_of_path(path)

        new_locations = [loc for loc in all_locations if loc != path_location]

        if len(new_locations) == 0:
            print("The current location is the only one available.")

            # The set of locations is global, so if this photoset can only live where it
            # already is, none of the other photosets can be moved either.
            return
        elif len(new_locations) == 1:
            selected_location = new_locations[0]
        else:
            selected_location = util.ask_for_choice(
                f"Where would you like to move {path.name}?", new_locations)

        for path in paths:
            from_location = world.location_of_path(path)

            photoset = Photoset(FolderTree(path))

            try:
                ChecksRunner.instance().run(photoset, self.__prechecks)

                new_path = selected_location / photoset.path.parent.relative_to(
                    from_location)

                photoset.move(new_path)

            except CheckFailedError as error:
                print(f"Unable to move {photoset.name}: {error}")
Example No. 4
    def backwards(self, photoset: Photoset) -> None:
        for part in photoset.parts:
            if not self.__run_prechecks(part):
                raise ExtractorError(
                    f"Failed prechecks while running {self.__name} extractor backwards on {part.name}"
                )

            filtered = part.tree[self.__filter_folder]

            if not filtered:
                continue

            filtered_photoset = Photoset(filtered)

            if not self.__run_prechecks(filtered_photoset):
                raise ExtractorError(
                    f"Failed prechecks while running {self.__name} extractor backwards on {part.name}/"
                    f"{self.__filter_folder}")

            # Lift the files out of the filter folder, back into the part itself.
            filtered_set = RelativeFileset(filtered.path, filtered.flatten())

            filtered_set.move_up()

        photoset.tree.refresh()
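RelativeFileset.move_up is what actually undoes the extraction; its body is not shown here. Assuming it moves everything inside the filter folder one level up while preserving the relative layout, a rough standalone equivalent (hypothetical) would be:

    import shutil
    from pathlib import Path


    def move_contents_up(folder: Path) -> None:
        # Hypothetical sketch: move everything under `folder` into its parent,
        # keeping the relative layout, then remove the emptied folder.
        # Name clashes with existing files in the parent are not handled.
        for item in sorted(folder.rglob("*")):
            if item.is_file():
                target = folder.parent / item.relative_to(folder)

                target.parent.mkdir(parents=True, exist_ok=True)
                shutil.move(str(item), str(target))

        shutil.rmtree(folder)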
Example No. 5
    def perform(self, args: Namespace, world: World, group: Group) -> None:
        stage_tree = self.tree_with_sets(world)

        photosets = [Photoset(subtree) for subtree in stage_tree.subtrees]

        self.__check_for_publishing(photosets, world, group)
Example No. 6
    def perform(self, args: Namespace, world: World, group: Group) -> None:
        stage_tree = self.tree_with_sets(world)

        photosets = [Photoset(subtree) for subtree in stage_tree.subtrees]

        scheduled_posts = group.get_scheduled_posts()

        last_date = ScheduleAction.__get_start_date(scheduled_posts)
        date_generator = ScheduleAction.__date_generator(last_date)

        print("Performing scheduling... ", end="")

        upload_hierarchy = ScheduleAction.__get_not_uploaded_hierarchy(photosets, group.url)

        if not upload_hierarchy:
            print("already done.")

            return

        print()

        for photoset, hashtags in upload_hierarchy.items():
            print(f"Uploading photoset {photoset.name}")

            photoset_metafile = photoset.get_metafile()

            for hashtag, parts in hashtags.items():
                print(f"Uploading #{hashtag}")

                for part in parts:
                    part_path = part.path.relative_to(photoset.path)

                    print(f"Uploading contents of {part_path}... ", end="", flush=True)

                    assert len(part.subtrees) == 0

                    photo_files = part.files

                    if hashtag != "report":
                        attachments = self.get_simple_attachments(group, photo_files)
                    else:
                        attachments = self.get_report_attachments(group, photoset.name, part)

                    post_datetime = next(date_generator)

                    post = Post(
                        text=f"#{hashtag}@{group.url}",
                        attachments=attachments,
                        date=post_datetime
                    )

                    post_id = group.add_post(post)

                    post_metafile = PostMetafile(
                        path=part_path,
                        post_id=post_id,
                        status=PostStatus.SCHEDULED
                    )

                    photoset_metafile.posts[group.url].append(post_metafile)
                    photoset.save_metafile(photoset_metafile)

                    print(f"successful, new post has id {post_id}")
Example No. 7
    def perform(self, args: Namespace, world: World, group: Group) -> None:
        stage_tree = self.tree_with_sets(world)

        metasets = [Photoset(subtree) for subtree in stage_tree.subtrees]

        scheduled_posts = group.get_scheduled_posts()
        published_posts = group.get_posts()

        scheduled_ids = [post.id for post in scheduled_posts]

        published_ids = [post.id for post in published_posts]
        published_timed_ids = [post.timer_id for post in published_posts]
        published_mapping = dict(zip(published_timed_ids, published_ids))

        print("Performing sync of local state with web...")

        for metaset in metasets:
            for photoset in metaset.parts:

                photoset_metafile = photoset.get_metafile()

                existing_posts = []

                for post_metafile in photoset_metafile.posts[group.url]:
                    post_id = post_metafile.post_id

                    print(f"Syncing post with id {post_id}... ", end="")

                    if post_metafile.status is PostStatus.SCHEDULED:

                        if post_id in scheduled_ids:
                            print("still scheduled")

                            existing_posts.append(post_metafile)

                        elif post_id in published_timed_ids:
                            post_metafile.status = PostStatus.PUBLISHED
                            post_metafile.post_id = published_mapping[post_id]

                            print(
                                f"was published, now has id {post_metafile.post_id}"
                            )

                            existing_posts.append(post_metafile)
                        elif post_id in published_ids:
                            # A scheduled id should never reappear as the id of a published post.
                            print("somehow ended up in the published array, aborting...")

                            assert False

                        else:
                            print("was deleted")

                    elif post_metafile.status is PostStatus.PUBLISHED:
                        assert post_id not in scheduled_ids
                        assert post_id not in published_timed_ids

                        if post_id in published_ids:
                            print("still published")

                            existing_posts.append(post_metafile)
                        else:
                            print("was deleted")

                photoset_metafile.posts[group.url] = existing_posts
                photoset.save_metafile(photoset_metafile)

        print("Performed successfully")