Example #1
    def print_subreddits(parser, reddit, search_for):
        """
        Print valid and invalid Subreddits.

        Calls previously defined private method:

            PrintSubs._find_subs()

        Parameters
        ----------
        parser: ArgumentParser
            argparse ArgumentParser object
        reddit: Reddit object
            Reddit instance created by PRAW API credentials
        search_for: str
            String denoting Subreddits to scrape for

        Returns
        -------
        subs: list
            List of valid Subreddits
        not_subs: list
            List of invalid Subreddits
        """

        check_subs_spinner = Halo(color="white",
                                  text="Validating Subreddit(s).")
        print()
        check_subs_spinner.start()
        subs, not_subs = PrintSubs._find_subs(parser, reddit, search_for)
        check_subs_spinner.succeed("Finished Subreddit validation.")

        if subs:
            print(Fore.GREEN + Style.BRIGHT +
                  "\nThe following Subreddits were found and will be scraped:")
            print(Fore.GREEN + Style.BRIGHT + "-" * 56)
            print(*subs, sep="\n")
        if not_subs:
            print(
                Fore.YELLOW + Style.BRIGHT +
                "\nThe following Subreddits were not found and will be skipped:"
            )
            print(Fore.YELLOW + Style.BRIGHT + "-" * 60)
            print(*not_subs, sep="\n")

            logging.warning("Failed to validate the following Subreddits:")
            logging.warning("%s" % not_subs)
            logging.warning("Skipping.")
            logging.info("")

        if not subs:
            logging.critical("ALL SUBREDDITS FAILED VALIDATION.")
            Errors.n_title("Subreddits")
            logging.critical("NO SUBREDDITS LEFT TO SCRAPE.")
            logging.critical("ABORTING URS.\n")

            quit()

        return subs
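
The private helper `PrintSubs._find_subs()` named in the docstring is not included on this page. A minimal sketch of what such a helper could look like, assuming `search_for` is a comma-separated string of Subreddit names and reusing only the PRAW and prawcore pieces already visible in these examples (the helper's name and exact checks in URS may differ):

    from prawcore import PrawcoreException

    def _find_subs(parser, reddit, search_for):
        # Hypothetical sketch: split the search string and force a fetch on each
        # lazy Subreddit object; names that raise a prawcore exception are invalid.
        # `parser` is part of the real signature but unused in this sketch.
        subs, not_subs = [], []

        for sub in [name.strip() for name in search_for.split(",") if name.strip()]:
            try:
                reddit.subreddit(sub).id
                subs.append(sub)
            except PrawcoreException:
                not_subs.append(sub)

        return subs, not_subs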
Example #2
 def wrapper(*args):
     try:
         function(*args)
     except ValueError:
         Errors.e_title(f"INVALID {error}.")
         logging.critical(f"RECEIVED INVALID {error}.")
         logging.critical("ABORTING URS.\n")
         quit()
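
Examples #2 through #7 show only the inner `wrapper` of logging/validation decorators, so `function` and `error` come from enclosing scopes that are not shown. A minimal sketch of the surrounding decorator factory, using a hypothetical name and reusing the `Errors` and `logging` calls from Example #2 (the real decorator in URS may be structured or named differently):

    import logging

    def log_invalid(error):
        # Hypothetical decorator factory: `error` labels the value being validated,
        # `function` is the decorated callable that may raise ValueError.
        def decorator(function):
            def wrapper(*args):
                try:
                    function(*args)
                except ValueError:
                    Errors.e_title(f"INVALID {error}.")
                    logging.critical(f"RECEIVED INVALID {error}.")
                    logging.critical("ABORTING URS.\n")
                    quit()
            return wrapper
        return decorator

    @log_invalid("DATE")
    def set_date(date_string):
        # Raises ValueError on bad input, which the wrapper turns into an abort.
        ...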
Example #3
 def wrapper(*args):
     try:
         return function(*args)
     except ValueError:
         Errors.n_title(reddit_object)
         logging.critical("NO %s LEFT TO SCRAPE." % reddit_object.upper())
         logging.critical("ABORTING URS.\n")
         quit()
Example #4
 def wrapper(*args):
     try:
         function(*args)
     except FileNotFoundError:
         Errors.i_title("Invalid `scrapes` directory structure.")
         logging.critical("AN ERROR HAS OCCURED WHILE PROCESSING SCRAPE DATA.")
         logging.critical("Invalid `scrapes` directory structure.\n")
         logging.critical("ABORTING URS.\n")
         quit()
Example #5
        def wrapper(*args):
            try:
                function(*args)

                logging.info(LogExport._get_export_switch(args[0]))
                logging.info("")
            except Exception as e:
                Errors.ex_title(e)
                logging.critical("AN ERROR HAS OCCURRED WHILE EXPORTING SCRAPED DATA.")
                logging.critical("%s" % e)
                logging.critical("ABORTING URS.\n")
                quit()
Example #6
 def wrapper(parser, reddit):
     try:
         function(parser, reddit)
         logging.info("Successfully logged in as u/%s." %
                      reddit.user.me())
         logging.info("")
     except PrawcoreException as error:
         Errors.p_title(error)
         logging.critical("LOGIN FAILED.")
         logging.critical("PRAWCORE EXCEPTION: %s." % error)
         logging.critical("ABORTING URS.\n")
         parser.exit()
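
The `reddit` argument threaded through this wrapper (and most snippets on this page) is a PRAW `Reddit` instance; `reddit.user.me()` returns the authenticated Redditor, and invalid credentials surface as a `PrawcoreException`. A minimal sketch of how such an instance is typically constructed with script-type credentials; URS builds its own instance from stored credentials, so the exact field values here are illustrative:

    import praw

    # Hypothetical credential values; replace with your own or supply them via praw.ini.
    reddit = praw.Reddit(
        client_id="YOUR_CLIENT_ID",
        client_secret="YOUR_CLIENT_SECRET",
        user_agent="urs-example",
        username="YOUR_USERNAME",
        password="YOUR_PASSWORD",
    )

    print(reddit.user.me())    # triggers authentication; raises a PrawcoreException on failure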
Example #7
 def wrapper(*args):
     try:
         return function(*args)
     except ValueError:
         Errors.i_title("Scrape data is not located within the `scrapes` directory.")
         logging.critical("AN ERROR HAS OCCURRED WHILE PROCESSING SCRAPE DATA.")
         logging.critical("Scrape data is not located within the `scrapes` directory.")
         logging.critical("ABORTING URS.\n")
         quit()
     except TypeError:
         Errors.i_title("Invalid file format. Try again with a valid JSON file.")
         logging.critical("AN ERROR HAS OCCURRED WHILE PROCESSING SCRAPE DATA.")
         logging.critical("Invalid file format.")
         logging.critical("ABORTING URS.\n")
         quit()
Example #8
        def wrapper(reddit):
            user_limits = function(reddit)

            logging.info("RATE LIMIT DISPLAYED.")
            logging.info("Remaining requests: %s" % int(user_limits["remaining"]))
            logging.info("Used requests: %s" % user_limits["used"])
            logging.info("")

            if int(user_limits["remaining"]) == 0:
                Errors.l_title(convert_time(user_limits["reset_timestamp"]))
                logging.critical("RATE LIMIT REACHED. RATE LIMIT WILL RESET AT %s." % convert_time(user_limits["reset_timestamp"]))
                logging.critical("ABORTING URS.\n")
                quit()
            
            return user_limits
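
Example #8 depends on a `convert_time()` helper and a `user_limits` mapping whose keys match PRAW's `reddit.auth.limits` dictionary (`remaining`, `used`, `reset_timestamp`). A minimal sketch of the time-conversion helper, assuming the reset timestamp is a Unix epoch value (the formatting URS actually uses may differ):

    from datetime import datetime

    def convert_time(timestamp):
        # Hypothetical stand-in for the helper used above: render the epoch
        # reset_timestamp reported by PRAW as a readable local time.
        return datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d %H:%M:%S")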
Example #9
    def validate_user(parser, reddit):
        """
        Check if PRAW credentials are valid, then print rate limit PrettyTable.

        Parameters
        ----------
        parser: ArgumentParser
            argparse ArgumentParser object
        reddit: Reddit object
            Reddit instance created by PRAW API credentials

        Returns
        -------
        None
        """

        login_spinner = Halo(color="white", text="Logging in.")
        login_spinner.start()

        try:
            redditor = reddit.user.me()

            login_spinner.succeed(Style.BRIGHT + Fore.GREEN +
                                  "Successfully logged in as u/%s." % redditor)
            print()

            Validation.print_rate_limit(reddit)

            logging.info("Successfully logged in as u/%s." % redditor)
            logging.info("")
        except PrawcoreException as error:
            login_spinner.fail(Style.BRIGHT + Fore.RED + "Failed to log in.")

            Errors.p_title(error)
            logging.critical("LOGIN FAILED.")
            logging.critical("PRAWCORE EXCEPTION: %s." % error)
            logging.critical("ABORTING URS.\n")
            parser.exit()
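
`Validation.print_rate_limit()` is called in Example #9 but not shown. The docstring says it prints a rate-limit PrettyTable, and PRAW exposes the relevant counters in `reddit.auth.limits`, so a minimal sketch could look like this (the real table layout in URS may differ):

    from prettytable import PrettyTable

    def print_rate_limit(reddit):
        # Hypothetical sketch: read PRAW's rate-limit counters (populated after
        # the login request above) and print them in a small PrettyTable.
        limits = reddit.auth.limits

        table = PrettyTable()
        table.field_names = ["Remaining requests", "Used requests"]
        table.add_row([int(limits["remaining"]), int(limits["used"])])

        print(table)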
Example #10
    def validate(object_list, reddit, scraper_type):
        """
        Check if Subreddit(s), Redditor(s), or submission(s) exist and catch PRAW 
        exceptions. Log invalid Reddit objects to `urs.log` if applicable.

        Calls previously defined public method:

            Validation.check_existence()

        Parameters
        ----------
        object_list: list
            List of Reddit objects to check
        reddit: Reddit object
            Reddit instance created by PRAW API credentials
        scraper_type: str
            String denoting the scraper type

        Returns
        -------
        invalid: list
            List of invalid Reddit objects
        valid: list
            List of valid Reddit objects
        """

        object_type = "submission" \
            if scraper_type == "comments" \
            else scraper_type.capitalize()

        check_status = Status(
            "Finished %s validation." % object_type,
            "Validating %s(s)" % object_type,
            "white"
        )

        check_status.start()

        logging.info("Validating %s(s)..." % object_type)
        logging.info("")

        invalid, valid = Validation.check_existence(object_list, reddit, scraper_type)
        
        check_status.succeed()
        print()

        if invalid:
            warning_message = "The following %ss were not found and will be skipped:" % object_type

            print(Fore.YELLOW + Style.BRIGHT + warning_message)
            print(Fore.YELLOW + Style.BRIGHT + "-" * len(warning_message))
            print(*invalid, sep="\n")

            logging.warning("Failed to validate the following %ss:" % object_type)
            logging.warning("%s" % (invalid))
            logging.warning("Skipping.")
            logging.info("")

        if not valid:
            logging.critical("ALL %sS FAILED VALIDATION." % object_type.upper())
            Errors.n_title(object_type + "s")
            logging.critical("NO %sS LEFT TO SCRAPE." % object_type.upper())
            logging.critical("ABORTING URS.\n")
            
            quit()

        return invalid, valid
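
The `Status` object used in Example #10 (constructed with a success message, an in-progress message, and a color, then driven with `start()` and `succeed()`) is not shown on this page. A minimal sketch, assuming it is a thin wrapper around the Halo spinner seen in the other examples:

    from halo import Halo

    class Status:
        # Hypothetical sketch of the spinner wrapper used in Example #10.
        def __init__(self, after_message, before_message, color):
            self._after_message = after_message
            self._spinner = Halo(color=color, text=before_message)

        def start(self):
            self._spinner.start()

        def succeed(self):
            self._spinner.succeed(self._after_message)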
Example #11
    def display_tree(search_date):
        """
        Display the scrapes directory for a specific date.

        Calls previously defined private methods:

            DateTree._check_date_format()
            DateTree._create_directory_tree()
            DateTree._find_date_directory()

        Parameters
        ----------
        search_date: str
            String denoting the date within the scrapes directory to search for

        Returns
        -------
        None
        """

        logging.info(f"Running tree command...")
        logging.info("")

        try:
            search_date = DateTree._check_date_format(search_date)

            find_dir_halo = Halo(
                color="white",
                text=f"Searching for {search_date} directory within `scrapes`."
            )

            find_dir_halo.start()

            dir_exists = DateTree._find_date_directory(search_date)
            if dir_exists:
                find_dir_halo.succeed(text=f"URS was run on {search_date}.")

                date_dir = f"{Path(Path.cwd()).parents[0]}/scrapes/{search_date}"

                tree = Tree("[bold blue]scrapes/")
                dir_tree = tree.add(f"[bold blue]{search_date}")

                DateTree._create_directory_tree(date_dir, dir_tree)

                rich.print(tree)
                logging.info(
                    f"Displayed directory tree for scrapes run on {search_date}."
                )
                logging.info("")
                print()
            else:
                error_messsage = f"URS was not run on {search_date}."
                find_dir_halo.fail(Fore.RED + Style.BRIGHT + error_messsage)
                print()

                logging.critical(error_message)
                logging.critical("ABORTING URS.\n")

                quit()
        except TypeError:
            logging.critical("INVALID DATE FORMAT.")
            logging.critical("ABORTING URS.\n")

            Errors.e_title(
                "INVALID DATE FORMAT. ACCEPTED FORMATS: MM-DD-YYYY or MM/DD/YYYY."
            )
            quit()
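
The `except TypeError` branch above implies that `DateTree._check_date_format()` raises `TypeError` when the supplied date matches neither MM-DD-YYYY nor MM/DD/YYYY. A minimal sketch of such a check, offered only as an assumption about the helper's behavior:

    import re

    def _check_date_format(search_date):
        # Hypothetical sketch: normalize MM/DD/YYYY to MM-DD-YYYY and raise
        # TypeError for anything else, matching the handler in Example #11.
        normalized = search_date.replace("/", "-")

        if not re.fullmatch(r"\d{2}-\d{2}-\d{4}", normalized):
            raise TypeError(f"Invalid date format: {search_date}")

        return normalized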