Example #1
    def _create_settings(args, parser, reddit):
        """
        Create scrape settings for each user input.

        Calls methods from external modules:

            GetPRAWScrapeSettings().create_list()
            Validation.validate()
            GetPRAWScrapeSettings().get_settings()
            Global.make_list_dict()

        Parameters
        ----------
        args: Namespace
            Namespace object containing all arguments that were defined in the CLI 
        parser: ArgumentParser
            argparse ArgumentParser object
        reddit: Reddit object
            Reddit instance created by PRAW API credentials

        Returns
        -------
        s_master: dict
            Dictionary containing all scrape settings
        """

        sub_list = GetPRAWScrapeSettings().create_list(args, "subreddit")
        not_subs, subs = Validation.validate(sub_list, reddit, "subreddit")
        s_master = make_list_dict(subs)
        GetPRAWScrapeSettings().get_settings(args, not_subs, s_master,
                                             "subreddit")

        return s_master
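
The `Global` helpers referenced in the docstring build the master dictionary that the settings are attached to. A minimal sketch of what `make_list_dict` (used above) and `make_none_dict` (used in Examples #6 and #7) might look like, assuming they simply map each validated name to an empty container (an illustrative guess, not the actual URS implementation):

    def make_list_dict(item_list):
        # Sketch (assumption): key each validated name to an empty list
        # that per-item scrape settings can later be appended to.
        return {item: [] for item in item_list}

    def make_none_dict(item_list):
        # Sketch (assumption): key each validated name to None as a
        # placeholder until settings are attached.
        return {item: None for item in item_list}
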
Example #2
    def _set_info_and_object(args, reddit):
        """
        Set the stream information and Reddit object based on CLI args.

        Parameters
        ----------
        args: Namespace
            Namespace object containing all arguments that were defined in the CLI
        reddit: Reddit object
            Reddit instance created by PRAW API credentials

        Returns
        -------
        reddit_object: PRAW Subreddit or Redditor object
        stream_info: str
            String denoting the livestream information
        """

        if args.live_subreddit:
            PRAWTitles.lr_title()

            Validation.validate([args.live_subreddit], reddit, "subreddit")

            initial_message = f"Initializing Subreddit livestream for r/{args.live_subreddit}."
            
            stream_info = f"in r/{args.live_subreddit}"
            reddit_object = reddit.subreddit(args.live_subreddit)

        elif args.live_redditor:
            PRAWTitles.lu_title()

            Validation.validate([args.live_redditor], reddit, "redditor")

            initial_message = f"Initializing Redditor livestream for u/{args.live_redditor}."
            
            stream_info = f"by u/{args.live_redditor}"
            reddit_object = reddit.redditor(args.live_redditor)
        
        Halo().info(Fore.CYAN + Style.BRIGHT + initial_message)
        logging.info(initial_message + "..")
        Halo().info("New entries will appear when posted to Reddit.")

        return reddit_object, stream_info
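
The returned `reddit_object` and `stream_info` can then drive the livestream loop. A minimal usage sketch built on PRAW's stream generators (the loop body and output format are assumptions for illustration):

    # Hypothetical caller: stream new submissions from the object returned
    # above. PRAW's stream generators block and yield items as they appear.
    reddit_object, stream_info = _set_info_and_object(args, reddit)

    for submission in reddit_object.stream.submissions(skip_existing=True):
        print(f"New submission {stream_info}: {submission.title}")
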
Example #3
    def test_validate_all_valid_reddit_objects(self):
        reddit = Login.create_reddit_object()

        object_list = ["askreddit", "wallstreetbets", "cscareerquestions"]

        scraper_type = "subreddit"

        invalid, valid = Validation.validate(object_list, reddit, scraper_type)

        assert len(valid) == 3
        assert not invalid
Example #4
    def test_validate_all_invalid_reddit_objects_force_quit(self):
        reddit = Login.create_reddit_object()

        object_list = [
            "shdg8h342842h3gidbsfgjdbs",
            "asdfhauhwspf8912034812hudfghb979023974ht",
            "xcvhcsxiuvbeidefgh3qw48tr324805tyasdguap;l"
        ]

        scraper_type = "subreddit"

        try:
            _, _ = Validation.validate(object_list, reddit, scraper_type)
            assert False
        except SystemExit:
            assert True
Example #5
    def test_validate_both_valid_and_invalid_reddit_objects(self):
        reddit = Login.create_reddit_object()

        object_list = [
            "askreddit", "wallstreetbets", "cscareerquestions",
            "shdg8h342842h3gidbsfgjdbs",
            "asdfhauhwspf8912034812hudfghb979023974ht",
            "xcvhcsxiuvbeidefgh3qw48tr324805tyasdguap;l"
        ]

        scraper_type = "subreddit"

        invalid, valid = Validation.validate(object_list, reddit, scraper_type)

        assert len(valid) == 3
        assert len(invalid) == 3
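
Together, Examples #3 through #5 pin down the contract for `Validation.validate()`: it returns the invalid and valid names as two lists, and it force-quits only when nothing validates. A minimal sketch of a validator satisfying that contract (an assumption for illustration, not URS's actual implementation):

    import sys

    from prawcore.exceptions import PrawcoreException

    def validate_sketch(object_list, reddit, scraper_type):
        # Probe each name; a failed fetch marks it invalid.
        invalid, valid = [], []

        for name in object_list:
            try:
                if scraper_type == "subreddit":
                    reddit.subreddit(name).id    # forces an API fetch
                elif scraper_type == "redditor":
                    reddit.redditor(name).id
                valid.append(name)
            except PrawcoreException:
                invalid.append(name)

        # Matches the force-quit test: exit when nothing validated.
        if not valid:
            sys.exit(1)

        return invalid, valid
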
Example #6
    def run(args, parser, reddit):
        """
        Run comments scraper.

        Calls a previously defined public method:

            Write.write()

        Calls public methods from external modules:

            GetPRAWScrapeSettings().create_list()
            Validation.validate()
            GetPRAWScrapeSettings().get_settings()
            Global.make_none_dict()

            PRAWTitles.c_title()

        Parameters
        ----------
        args: Namespace
            Namespace object containing all arguments that were defined in the CLI
        parser: ArgumentParser
            argparse ArgumentParser object
        reddit: Reddit object
            Reddit instance created by PRAW API credentials

        Returns
        -------
        c_master: dict
            Dictionary containing all submission comments scrape settings
        """

        PRAWTitles.c_title()

        post_list = GetPRAWScrapeSettings().create_list(args, "comments")
        not_posts, posts = Validation.validate(post_list, reddit, "comments")
        c_master = make_none_dict(posts)
        GetPRAWScrapeSettings().get_settings(args, not_posts, c_master,
                                             "comments")

        Write.write(args, c_master, reddit)

        return c_master
Example #7
    def run(args, parser, reddit):
        """
        Get, sort, then write scraped Redditor information to CSV or JSON.

        Calls a previously defined public method:

            Write.write()

        Calls public methods from external modules: 

            GetPRAWScrapeSettings().create_list()
            Validation.validate()
            Global.make_none_dict()
            GetPRAWScrapeSettings().get_settings()

        Parameters
        ----------
        args: Namespace
            Namespace object containing all arguments that were defined in the CLI 
        parser: ArgumentParser
            argparse ArgumentParser object
        reddit: Reddit object
            Reddit instance created by PRAW API credentials

        Returns
        -------
        u_master: dict
            Dictionary containing all Redditor scrape settings
        """

        PRAWTitles.u_title()

        user_list = GetPRAWScrapeSettings().create_list(args, "redditor")
        not_users, users = Validation.validate(user_list, reddit, "redditor")
        u_master = make_none_dict(users)
        GetPRAWScrapeSettings().get_settings(args, not_users, u_master,
                                             "redditor")

        Write.write(args, reddit, u_master)

        return u_master
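
Finally, a hedged sketch of how these `run()` entry points might be wired into the CLI layer. The module and flag names here (`Comments`, `Redditor`, `args.comments`, `args.redditor`) are assumptions for illustration only:

    # Hypothetical dispatcher: choose a scraper based on which CLI flag
    # was supplied, mirroring the run(args, parser, reddit) signatures above.
    if args.comments:
        c_master = Comments.run(args, parser, reddit)
    elif args.redditor:
        u_master = Redditor.run(args, parser, reddit)
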