Example #1
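These snippets are excerpted from a single module, so the imports and project-local helpers they rely on are implied rather than shown. A minimal header that would make them resolvable might look like the sketch below; the third-party packages (praw, prawcore, psaw, finviz) are real, while cfg, check_positive, parse_known_args_and_warn, find_tickers, and print_and_record_reddit_post are project-local names whose module paths here are guesses.

import argparse
import warnings
from datetime import datetime, timedelta

import finviz
import praw
from prawcore.exceptions import ResponseException
from psaw import PushshiftAPI

import config as cfg  # hypothetical module holding the Reddit API credentials
from helpers import (  # hypothetical module with the shared CLI helpers
    check_positive,
    parse_known_args_and_warn,
)
from reddit_helpers import (  # hypothetical module with the Reddit helpers
    find_tickers,
    print_and_record_reddit_post,
)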
def wsb_community(l_args):
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="wsb",
        description="""Print what WSB gang are up to in subreddit wallstreetbets. [Source: Reddit]""",
    )
    parser.add_argument(
        "-l",
        "--limit",
        action="store",
        dest="n_limit",
        type=check_positive,
        default=10,
        help="limit of posts to print.",
    )
    parser.add_argument(
        "-n",
        "--new",
        action="store_true",
        default=False,
        dest="b_new",
        help="new flag, if true the posts retrieved are based on being more recent rather than their score.",
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, l_args)
        if not ns_parser:
            return

        praw_api = praw.Reddit(
            client_id=cfg.API_REDDIT_CLIENT_ID,
            client_secret=cfg.API_REDDIT_CLIENT_SECRET,
            username=cfg.API_REDDIT_USERNAME,
            user_agent=cfg.API_REDDIT_USER_AGENT,
            password=cfg.API_REDDIT_PASSWORD,
        )

        d_submission = {}
        l_watchlist_links = []

        # psaw_api = PushshiftAPI()

        if ns_parser.b_new:
            submissions = praw_api.subreddit("wallstreetbets").new(
                limit=ns_parser.n_limit
            )
        else:
            submissions = praw_api.subreddit("wallstreetbets").hot(
                limit=ns_parser.n_limit
            )
        while True:
            try:
                submission = next(submissions, None)
                if submission:
                    # Get more information about post using PRAW api
                    submission = praw_api.submission(id=submission.id)

                    # Ensure that the post hasn't been removed by a moderator
                    # in the meanwhile
                    if not submission.removed_by_category:

                        l_watchlist_links.append(
                            f"https://old.reddit.com{submission.permalink}"
                        )

                        print_and_record_reddit_post(d_submission, submission)

                # The listing generator is exhausted; no more posts to fetch
                else:
                    break
            except ResponseException:
                print(
                    "Received a response from Reddit with an authorization error. check your token.\n"
                )
                return
    except Exception as e:
        print(e, "\n")
Example #2
def spac_community(l_args):
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="spac_c",
        description="""Print other users SPACs announcement under subreddit 'SPACs' [Source: Reddit]""",
    )
    parser.add_argument(
        "-l",
        "--limit",
        action="store",
        dest="n_limit",
        type=check_positive,
        default=10,
        help="limit of posts with SPACs retrieved",
    )
    parser.add_argument(
        "-p",
        "--popular",
        action="store_true",
        default=False,
        dest="b_popular",
        help="popular flag, if true the posts retrieved are based on score rather than time",
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, l_args)
        if not ns_parser:
            return

        praw_api = praw.Reddit(
            client_id=cfg.API_REDDIT_CLIENT_ID,
            client_secret=cfg.API_REDDIT_CLIENT_SECRET,
            username=cfg.API_REDDIT_USERNAME,
            user_agent=cfg.API_REDDIT_USER_AGENT,
            password=cfg.API_REDDIT_PASSWORD,
        )

        d_submission = {}
        d_watchlist_tickers = {}
        l_watchlist_links = []
        l_watchlist_author = []

        # psaw_api = PushshiftAPI()

        if ns_parser.b_popular:
            submissions = praw_api.subreddit("SPACs").hot(limit=ns_parser.n_limit)
        else:
            submissions = praw_api.subreddit("SPACs").new(limit=ns_parser.n_limit)

        while True:
            try:
                submission = next(submissions, None)
                if submission:
                    # Get more information about post using PRAW api
                    submission = praw_api.submission(id=submission.id)

                    # Ensure that the post hasn't been removed by a moderator
                    # in the meanwhile, that there is a description and it's not
                    # just an image, that the flair is meaningful, and that we
                    # aren't re-considering the same author's watchlist
                    if (
                        not submission.removed_by_category
                        and submission.selftext
                        and submission.link_flair_text not in ["Yolo", "Meme"]
                        and submission.author  # guard against deleted authors
                        and submission.author.name not in l_watchlist_author
                    ):
                        l_tickers_found = find_tickers(submission)

                        if l_tickers_found:
                            # Add another author's name to the parsed watchlists
                            l_watchlist_author.append(submission.author.name)

                            # Lookup stock tickers within a watchlist
                            for key in l_tickers_found:
                                if key in d_watchlist_tickers:
                                    # Increment stock ticker found
                                    d_watchlist_tickers[key] += 1
                                else:
                                    # Initialize stock ticker found
                                    d_watchlist_tickers[key] = 1

                            l_watchlist_links.append(
                                f"https://old.reddit.com{submission.permalink}"
                            )

                            print_and_record_reddit_post(d_submission, submission)

                # The listing generator is exhausted; no more posts to fetch
                else:
                    break
            except ResponseException:
                print(
                    "Received a response from Reddit with an authorization error. check your token.\n"
                )
                return

        if d_watchlist_tickers:
            lt_watchlist_sorted = sorted(
                d_watchlist_tickers.items(), key=lambda item: item[1], reverse=True
            )
            s_watchlist_tickers = ""
            n_tickers = 0
            for t_ticker in lt_watchlist_sorted:
                try:
                    # If the lookup doesn't raise, the ticker exists on Finviz,
                    # so it is safe to report
                    finviz.get_stock(t_ticker[0])
                    if t_ticker[1] > 1:
                        s_watchlist_tickers += f"{t_ticker[1]} {t_ticker[0]}, "
                        n_tickers += 1
                except Exception:
                    # Ticker not found on Finviz; skip it
                    pass

            if n_tickers:
                print(
                    "The following stock tickers have been mentioned more than once across the previous SPACs:"
                )
                print(s_watchlist_tickers[:-2])
        print("")

    except Exception as e:
        print(e, "\n")
Example #3
def spac(l_args):
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="spac",
        description=""" Show other users SPACs announcement [Reddit] """,
    )
    parser.add_argument(
        "-l",
        "--limit",
        action="store",
        dest="n_limit",
        type=check_positive,
        default=5,
        help="limit of posts with SPACs retrieved.",
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, l_args)
        if not ns_parser:
            return

        praw_api = praw.Reddit(
            client_id=cfg.API_REDDIT_CLIENT_ID,
            client_secret=cfg.API_REDDIT_CLIENT_SECRET,
            username=cfg.API_REDDIT_USERNAME,
            user_agent=cfg.API_REDDIT_USER_AGENT,
            password=cfg.API_REDDIT_PASSWORD,
        )

        d_submission = {}
        d_watchlist_tickers = {}
        l_watchlist_links = []
        l_watchlist_author = []

        # n_ts_after = int(
        #    (datetime.today() - timedelta(days=ns_parser.n_days)).timestamp()
        # )
        l_sub_reddits = [
            "pennystocks",
            "RobinHoodPennyStocks",
            "Daytrading",
            "StockMarket",
            "stocks",
            "investing",
            "wallstreetbets",
        ]

        warnings.filterwarnings("ignore")  # Silence psaw's shard-availability UserWarning
        psaw_api = PushshiftAPI()
        submissions = psaw_api.search_submissions(
            # after=n_ts_after,
            subreddit=l_sub_reddits,
            q="SPAC|Spac|spac|Spacs|spacs",
            filter=["id"],
        )
        n_flair_posts_found = 0
        while True:
            try:
                submission = next(submissions, None)
                if submission:
                    # Get more information about post using PRAW api
                    submission = praw_api.submission(id=submission.id)

                    # Ensure that the post hasn't been removed by a moderator
                    # in the meanwhile, that there is a description and it's not
                    # just an image, that the flair is meaningful, and that we
                    # aren't re-considering the same author's watchlist
                    if (
                        not submission.removed_by_category
                        and submission.selftext
                        and submission.link_flair_text not in ["Yolo", "Meme"]
                        and submission.author  # guard against deleted authors
                        and submission.author.name not in l_watchlist_author
                    ):
                        l_tickers_found = find_tickers(submission)

                        if l_tickers_found:
                            # Add another author's name to the parsed watchlists
                            l_watchlist_author.append(submission.author.name)

                            # Lookup stock tickers within a watchlist
                            for key in l_tickers_found:
                                if key in d_watchlist_tickers:
                                    # Increment stock ticker found
                                    d_watchlist_tickers[key] += 1
                                else:
                                    # Initialize stock ticker found
                                    d_watchlist_tickers[key] = 1

                            l_watchlist_links.append(
                                f"https://old.reddit.com{submission.permalink}"
                            )

                            print_and_record_reddit_post(d_submission, submission)

                            # Increment count of valid posts found
                            n_flair_posts_found += 1

                    # Check if number of wanted posts found has been reached
                    if n_flair_posts_found > ns_parser.n_limit - 1:
                        break

                # Check if search_submissions didn't return any more posts
                else:
                    break
            except ResponseException:
                print(
                    "Received a response from Reddit with an authorization error. check your token.\n"
                )
                return

        if n_flair_posts_found:
            lt_watchlist_sorted = sorted(
                d_watchlist_tickers.items(), key=lambda item: item[1], reverse=True
            )
            s_watchlist_tickers = ""
            n_tickers = 0
            for t_ticker in lt_watchlist_sorted:
                try:
                    # If the lookup doesn't raise, the ticker exists on Finviz,
                    # so it is safe to report
                    finviz.get_stock(t_ticker[0])
                    if t_ticker[1] > 1:
                        s_watchlist_tickers += f"{t_ticker[1]} {t_ticker[0]}, "
                        n_tickers += 1
                except Exception:
                    # Ticker not found on Finviz; skip it
                    pass
            if n_tickers:
                print(
                    "The following stock tickers have been mentioned more than once across the previous SPACs:"
                )
                print(s_watchlist_tickers[:-2])
        print("")

    except Exception as e:
        print(e, "\n")
Example #4
def due_diligence(l_args, s_ticker):
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="red",
        description="""
            Print the stock's top due diligence posts from other users. [Source: Reddit]
        """,
    )

    parser.add_argument(
        "-l",
        "--limit",
        action="store",
        dest="n_limit",
        type=check_positive,
        default=5,
        help="limit of posts to retrieve.",
    )
    parser.add_argument(
        "-d",
        "--days",
        action="store",
        dest="n_days",
        type=check_positive,
        default=3,
        help="number of prior days to look for.",
    )
    parser.add_argument(
        "-a",
        "--all",
        action="store_true",
        dest="b_all",
        default=False,
        help="""
            search through all flairs (apart from Yolo and Meme), otherwise we focus on
            specific flairs: DD, technical analysis, Catalyst, News, Advice, Chart
        """,
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, l_args)
        if not ns_parser:
            return

        praw_api = praw.Reddit(
            client_id=cfg.API_REDDIT_CLIENT_ID,
            client_secret=cfg.API_REDDIT_CLIENT_SECRET,
            username=cfg.API_REDDIT_USERNAME,
            user_agent=cfg.API_REDDIT_USER_AGENT,
            password=cfg.API_REDDIT_PASSWORD,
        )

        psaw_api = PushshiftAPI()

        n_ts_after = int(
            (datetime.today() - timedelta(days=ns_parser.n_days)).timestamp()
        )
        l_flair_text = [
            "DD",
            "technical analysis",
            "Catalyst",
            "News",
            "Advice",
            "Chart",
        ]
        l_sub_reddits = [
            "pennystocks",
            "RobinHoodPennyStocks",
            "Daytrading",
            "StockMarket",
            "stocks",
            "investing",
            "wallstreetbets",
        ]

        submissions = psaw_api.search_submissions(
            after=n_ts_after, subreddit=l_sub_reddits, q=s_ticker, filter=["id"]
        )
        d_submission = {}
        n_flair_posts_found = 0
        while True:
            submission = next(submissions, None)
            if submission:
                # Get more information about post using PRAW api
                submission = praw_api.submission(id=submission.id)

                # Ensure that the post hasn't been removed in the meanwhile
                if not submission.removed_by_category:

                    # Either focus on the specific flairs or just filter out
                    # Yolo and Meme, based on the b_all flag
                    if (
                        submission.link_flair_text not in ["Yolo", "Meme"]
                        if ns_parser.b_all
                        else submission.link_flair_text in l_flair_text
                    ):

                        print_and_record_reddit_post(d_submission, submission)

                        # If needed, submission.comments could give us the top comments

                        # Increment count of valid posts found
                        n_flair_posts_found += 1

                # Check if number of wanted posts found has been reached
                if n_flair_posts_found > ns_parser.n_limit - 1:
                    break

            # Check if search_submissions didn't return any more posts
            else:
                break

        if n_flair_posts_found < ns_parser.n_limit:
            print("No more posts with specified requirements found.")
        else:
            print("")
        # Create df with found data. Useful for saving all info in excel file.
        # df_submissions = pd.DataFrame.from_dict(
        #   d_submission, orient='index', columns=list(d_submission[next(iter(d_submission.keys()))].keys())
        # )
        # df_submissions.sort_values(by=['created_utc'], inplace=True, ascending=True)

    except Exception as e:
        print(e)
        print("")