Esempio n. 1
0
def _fnvz_cap_to_dollars(cap_text):
    """Convert a finviz market-cap string such as '12.3B' or '850.5M' to dollars."""
    suffixes = {'K': 1e3, 'M': 1e6, 'B': 1e9}
    cap_text = cap_text.strip()
    if cap_text and cap_text[-1].upper() in suffixes:
        return float(cap_text[:-1]) * suffixes[cap_text[-1].upper()]
    return float(cap_text)


def fnvz(ticker):
    """Gather valuation inputs for *ticker* from finviz.

    Returns
    -------
    tuple
        ``(g, price, mc, EPS)`` where ``g`` is the expected EPS growth rate
        (as a fraction), ``price`` the last price, ``mc`` the market cap in
        dollars and ``EPS`` the next-year EPS estimate (0 when missing).
    """
    # Fetch the snapshot once; the original re-queried finviz for every field.
    try:
        data = finviz.get_stock(ticker)
    except Exception:
        data = {}  # fall through to the per-field fallbacks below

    # Growth: forward 5Y estimate, else trailing 5Y, else a 5% default.
    try:
        g = float(data['EPS next 5Y'].rstrip("%")) / 100
    except Exception:
        try:
            g = float(data['EPS past 5Y'].rstrip("%")) / 100
        except Exception:
            g = 0.05

    try:
        price = float(data['Price'])
        # Bug fix: the original only understood a 'B' suffix, so 'M'-cap
        # stocks always fell through to the web fallback below.
        mc = _fnvz_cap_to_dollars(str(data['Market Cap']))
    except Exception:
        # Fallback: financialmodelingprep company profile endpoint.
        url_prof = 'https://financialmodelingprep.com/api/company/profile/'
        url_ticker = urllib.parse.urljoin(url_prof, ticker)
        response_prof = requests.get(url_ticker)
        # The endpoint wraps its JSON payload in <pre> tags.
        prof_data = json.loads(
            response_prof.text.replace("<pre>", "").replace("</pre>", ""))
        price = float(prof_data[ticker]['Price'])
        mc = float(prof_data[ticker]['MktCap'])

    try:
        EPS = float(data['EPS next Y'])
    except Exception:
        EPS = 0

    return (g, price, mc, EPS)
Esempio n. 2
0
def database_function(UI, symbol):
    """Populate the UI widgets with finviz data for *symbol* and load options.

    Fetches analyst price targets and the fundamentals snapshot, pushes the
    headline metrics into ``UI.info``, fills up to five analyst ratings into
    ``UI.ratings``, then triggers the options download via ``UI.loadoptions``.
    """
    analyst_ratings = finviz.get_analyst_price_targets(symbol)
    infos = finviz.get_stock(symbol)

    # finviz reports 'Volatility' as "<weekly> <monthly>" in one field.
    wv, mv = infos['Volatility'].split(" ")[0], infos['Volatility'].split(
        " ")[1]
    dic = [
        symbol, infos['Price'], infos['Target Price'], infos['P/E'],
        infos['Change'], infos['Perf Week'], infos['Perf Month'],
        infos['Perf Quarter'], infos['Perf Half Y'], infos['Perf Year'], wv, mv
    ]

    for i in range(len(UI.info)):
        UI.info[i]["text"] = dic[i]

    # NOTE(review): 0.158745 looks like a volatility scaling constant — confirm
    # its derivation against the `database` helper's expectations.
    vol = float(mv.strip("%")) * 0.158745

    price = float(infos['Price'])
    price_1, price_2 = database(symbol, 900, price, vol)

    # Bug fix: was `max(len(analyst_ratings), 5)`, which raised IndexError
    # whenever finviz returned fewer than five ratings. `min` shows at most
    # the five rating rows available.
    for j in range(min(len(analyst_ratings), 5)):
        a = analyst_ratings[j]
        te = "Date: {}  Analyst: {}   Rating: {}  Price from: {} to {}".format(
            a["date"], a["analyst"], a["rating"], a["price_from"],
            a["price_to"])
        UI.ratings[j]["text"] = te

    UI.loadoptions(symbol, price_1, price_2, price)

    UI.Data["state"] = "normal"
def screener(l_args, s_ticker):
    """Print the finviz fundamentals table for ``s_ticker``.

    Unknown CLI arguments abort the command with a message; otherwise the
    full finviz snapshot is printed with '-' placeholder rows removed.
    """
    parser = argparse.ArgumentParser(
        prog='screener',
        description=
        """Print several metrics about the company. The following fields are expected: 
                                                    Company, Sector, Industry, Country, Index, P/E, EPS (ttm), Insider Own, 
                                                    Shs Outstand, Perf Week, Market Cap, Forward P/E, EPS next Y, Insider Trans, 
                                                    Shs Float, Perf Month, Income, EPS next Q, Inst Own, Short Float, Perf Quarter, 
                                                    Sales, P/S, EPS this Y, Inst Trans, Short Ratio, Perf Half Y, Book/sh, P/B, ROA, 
                                                    Target Price, Perf Year, Cash/sh, P/C, ROE, 52W Range, Perf YTD, P/FCF, EPS past 5Y, 
                                                    ROI, 52W High, Beta, Quick Ratio, Sales past 5Y, Gross Margin, 52W Low, ATR, 
                                                    Employees, Current Ratio, Sales Q/Q, Oper. Margin, RSI (14), Volatility, Optionable, 
                                                    Debt/Eq, EPS Q/Q, Profit Margin, Rel Volume, Prev Close, Shortable, LT Debt/Eq, 
                                                    Earnings, Payout, Avg Volume, Price, Recom, SMA20, SMA50, SMA200, Volume, Change. 
                                                    [Source: Finviz]""")

    _, unknown = parser.parse_known_args(l_args)
    if unknown:
        print(f"The following args couldn't be interpreted: {unknown}\n")
        return

    fundamentals = pd.DataFrame.from_dict(finviz.get_stock(s_ticker),
                                          orient='index',
                                          columns=['Values'])
    fundamentals = fundamentals[fundamentals.Values != '-']
    print(fundamentals.to_string(header=False))

    print("")
Esempio n. 4
0
def lookup(stock):
    """Append the finviz fundamentals snapshot and the analyst price targets
    for *stock* to the module-level ``stock_data`` / ``analyst_data`` caches."""
    stock_data.append(finviz.get_stock(stock))
    analyst_data.append({
        'symbol': stock,
        'info': finviz.get_analyst_price_targets(stock),
    })
Esempio n. 5
0
 def __init__(self, yahoo_ticker: YahooTicker):
     """Load the finviz snapshot for *yahoo_ticker* and populate every metric field.

     Parameters
     ----------
     yahoo_ticker : YahooTicker
         Wrapper whose ``TickerName`` is the symbol passed to finviz.
     """
     self.__yFinViz = FinViz.get_stock(yahoo_ticker.TickerName)
     # Debug dumps of the raw finviz payload.
     print(self.__yFinViz.keys())
     print(self.__yFinViz.values())
     # Each private setter below extracts one metric out of self.__yFinViz.
     self.__setStockName()
     self.__setPeRatio()
     self.__setEpsTtm()
     self.__setDividend()
     self.__setDividendPcnt()
     self.__setBeta()
     self.__setPrice()
     self.__setShOutstand()
     self.__setMarketCap()
     self.__set52wHight()
     self.__set52wLow()
     self.__set52wRange()
     self.__setSalesQq()
     self.__setEpsQq()
     self.__setSma20()
     self.__setSma50()
     self.__setSma200()
     self.__setEarning()
     self.__setPayout()
     self.__setRelVolume()
     self.__setAvgVolume()
     self.__setVolume()
     self.__setChangePcnt()
 def __init__(self, a_ticker: str = 'CNI'):
     """Load the finviz snapshot for *a_ticker* (default 'CNI') and populate
     every metric field via the private setters."""
     self._ticker = a_ticker
     self._fin_viz = FinViz.get_stock(a_ticker)
     # Debug dump of the available finviz field names.
     print(self._fin_viz.keys())
     # Each private setter below extracts one metric out of self._fin_viz.
     self.__setStockName()
     self.__setStockSector()
     self.__setStockIndustry()
     self.__setStockCountry()
     self.__setPeRatio()
     self.__setEarning()
     self.__setEpsTtm()
     self.__setDividend()
     self.__setDividendPcnt()
     self.__setBeta()
     self.__setPrice()
     self.__setShOutstand()
     self.__setMarketCap()
     self.__set52wHight()
     self.__set52wLow()
     self.__set52wRange()
     self.__setRsi14()
     self.__setVolatility()
     self.__setPayout()
     self.__setVolume()
     self.__setChangePcnt()
     # NOTE(review): __setPrice() is invoked a second time here — possibly
     # redundant; confirm against the setter's implementation.
     self.__setPrice()
     '''
def get_fundamentals(ticker):
    """Return finviz fundamentals for *ticker* as a DataFrame indexed by attribute.

    On any failure the exception object itself is returned instead of being
    raised (existing contract — callers inspect the return value).
    """
    try:
        raw = json.dumps(finviz.get_stock(ticker))
        frame = pd.read_json(raw, orient='index').reset_index()
        frame.columns = ['Attributes', 'Values']
        return frame.set_index('Attributes')
    except Exception as err:
        return err
Esempio n. 8
0
def GetPriceFvz(tkt):
    '''
    Return the latest finviz price for *tkt*, or 0 when the lookup fails
    (the failure is logged to stdout).
    '''
    # @TODO: parameterize 'Price'
    try:
        return fvz.get_stock(tkt)['Price']
    except Exception:
        print("price not found in finviz for: " + tkt)
        return 0
Esempio n. 9
0
 def tktPrice(self, tkt, flag_process, etf_dict):
     """Resolve a price for *tkt* depending on the processing flag.

     For traditional ETFs the price is fetched live from finviz; for new
     ETFs it is taken from *etf_dict*. The raw price string is passed
     through ``float_or_na`` before being returned.
     """
     if flag_process == cons_flag_etf_trad:
         try:
             price = finviz.get_stock(tkt)['Price']
         except Exception:
             price = 'n/a'
             print("price not found in finviz for: " + tkt)
     elif flag_process == cons_flag_etf_new:
         price = etf_dict[tkt] if tkt in etf_dict.keys() else 'n/a'
     else:
         # Bug fix: an unrecognized flag previously left `price` unbound,
         # raising UnboundLocalError at the return below.
         price = 'n/a'
     return float_or_na(price)
Esempio n. 10
0
def display_spac_community(limit: int = 10, popular: bool = False):
    """Look at tickers mentioned in r/SPACs [Source: Reddit]

    Parameters
    ----------
    limit: int
        Number of posts to look through
    popular: bool
        Search by popular instead of new
    """
    posts, ticker_counts = reddit_model.get_spac_community(limit, popular)
    for post in posts:
        print_and_record_reddit_post({}, post)

    if ticker_counts:
        ranked = sorted(ticker_counts.items(), key=lambda kv: kv[1], reverse=True)
        repeated = []
        known = 0
        for symbol, count in ranked:
            try:
                # A successful lookup means the symbol exists on finviz,
                # so it is safe to report.
                finviz.get_stock(symbol)
                if int(count) > 1:
                    repeated.append(f"{count} {symbol}")
                known += 1
            except Exception:
                pass

        if known:
            console.print(
                "The following stock tickers have been mentioned more than once across the previous SPACs:"
            )
            console.print(", ".join(repeated))
    console.print("")
Esempio n. 11
0
File: TD.py Project: cruberg/guild
    def get_info(self,symbol):
        """Return a dict with 'Symbol', 'Sector' and 'Industry' for *symbol*
        from finviz.

        On failure the partial (possibly empty) dict gathered so far is
        returned and the failure is logged to stdout.
        """
        import finviz
        out = {}
        try:
            time.sleep(0.2)  # throttle successive finviz requests
            f = finviz.get_stock(symbol)
            out['Symbol'] = symbol
            out['Sector'] = f['Sector']
            out['Industry'] = f['Industry']
        except Exception:
            # Bug fix: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            print("{} Failed".format(symbol))

        return out
Esempio n. 12
0
def display_spac(limit: int = 5):
    """Look at posts containing 'spac' in top communities

    Parameters
    ----------
    limit: int
        Number of posts to get from each subreddit
    """
    warnings.filterwarnings("ignore")  # To avoid printing the warning
    posts, ticker_counts, n_found = reddit_model.get_spac(limit)
    for post in posts:
        print_and_record_reddit_post({}, post)

    if n_found > 0:
        ranked = sorted(ticker_counts.items(), key=lambda kv: kv[1], reverse=True)
        repeated = []
        known = 0
        for symbol, count in ranked:
            try:
                # A successful lookup means the symbol exists on finviz,
                # so it is safe to report.
                finviz.get_stock(symbol)
                if int(count) > 1:
                    repeated.append(f"{count} {symbol}")
                known += 1
            except Exception:
                pass
        if known:
            console.print(
                "The following stock tickers have been mentioned more than once across the previous SPACs:"
            )
            console.print(", ".join(repeated))
    console.print("")
Esempio n. 13
0
def display_watchlist(num: int):
    """Print other users watchlist. [Source: Reddit]

    Parameters
    ----------
    num: int
        Maximum number of submissions to look at
    """
    posts, ticker_counts, n_found = reddit_model.get_watchlists(num)
    for post in posts:
        print_and_record_reddit_post({}, post)
    if n_found > 0:
        ranked = sorted(ticker_counts.items(), key=lambda kv: kv[1], reverse=True)
        repeated = []
        known = 0
        for symbol, count in ranked:
            try:
                # A successful lookup means the symbol exists on finviz,
                # so it is safe to report.
                finviz.get_stock(symbol)
                if int(count) > 1:
                    repeated.append(f"{count} {symbol}")
                known += 1
            except Exception:
                pass
        if known:
            console.print(
                "The following stock tickers have been mentioned more than once across the previous watchlists:"
            )
            console.print(", ".join(repeated) + "\n")

    console.print("")
Esempio n. 14
0
    def get_na_float(row):
        """Return the share float for *row* as a plain float.

        Uses ``row['float']`` when present; when it is NaN, falls back to the
        finviz 'Shs Float' field. Suffixed strings like '12.5M' are expanded;
        unparseable values yield ``np.nan``.
        """
        my_float = finviz.get_stock(row['symbol'])['Shs Float'] if np.isnan(
            row['float']) else row['float']

        def str_to_float(f):
            # finviz reports floats like '12.5M'; expand the K/M/B suffix.
            powers = {'K': 3, 'M': 6, 'B': 9}
            try:
                return float(f)
            except (TypeError, ValueError):
                # Bug fix: both excepts were bare; narrowed to the parsing
                # errors this code can actually produce.
                try:
                    return float(f[:-1]) * (10**powers[f[-1]])
                except (TypeError, ValueError, KeyError, IndexError):
                    return np.nan

        return str_to_float(my_float)
Esempio n. 15
0
def screener(other_args: List[str], ticker: str):
    """FinViz ticker screener

    Parameters
    ----------
    other_args : List[str]
        argparse other args
    ticker : str
        Fundamental analysis ticker symbol
    """

    parser = argparse.ArgumentParser(
        add_help=False,
        prog="screener",
        description="""
            Print several metrics about the company. The following fields are expected:
            Company, Sector, Industry, Country, Index, P/E, EPS (ttm), Insider Own,
            Shs Outstand, Perf Week, Market Cap, Forward P/E, EPS next Y, Insider Trans,
            Shs Float, Perf Month, Income, EPS next Q, Inst Own, Short Float, Perf Quarter,
            Sales, P/S, EPS this Y, Inst Trans, Short Ratio, Perf Half Y, Book/sh, P/B, ROA,
            Target Price, Perf Year, Cash/sh, P/C, ROE, 52W Range, Perf YTD, P/FCF, EPS past 5Y,
            ROI, 52W High, Beta, Quick Ratio, Sales past 5Y, Gross Margin, 52W Low, ATR,
            Employees, Current Ratio, Sales Q/Q, Oper. Margin, RSI (14), Volatility, Optionable,
            Debt/Eq, EPS Q/Q, Profit Margin, Rel Volume, Prev Close, Shortable, LT Debt/Eq,
            Earnings, Payout, Avg Volume, Price, Recom, SMA20, SMA50, SMA200, Volume, Change.
            [Source: Finviz]
        """,
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, other_args)
        if not ns_parser:
            return

        # Build the fundamentals table and drop '-' placeholder rows.
        table = pd.DataFrame.from_dict(finviz.get_stock(ticker),
                                       orient="index",
                                       columns=["Values"])
        print(table[table.Values != "-"].to_string(header=False))
        print("")

    except Exception as e:
        print(e)
        print("")
        return
Esempio n. 16
0
def get_data(ticker: str) -> pd.DataFrame:
    """Get fundamental data from finviz

    Parameters
    ----------
    ticker : str
        Stock ticker

    Returns
    -------
    pd.DataFrame
        DataFrame of fundamental data
    """
    snapshot = finviz.get_stock(ticker)
    frame = pd.DataFrame.from_dict(snapshot, orient="index", columns=["Values"])
    # Drop rows where finviz reports the '-' placeholder.
    return frame[frame.Values != "-"]
Esempio n. 17
0
    def check_sell_rules(input_csv_path, output_csv_path):
        """Evaluate every position in *input_csv_path* against the sell rules.

        Reads the portfolio CSV, adds one boolean column per sell rule,
        fetches the current price for each symbol from finviz, applies the
        ``SellRuleChecker`` rules, and writes the annotated table to
        *output_csv_path*.

        Parameters
        ----------
        input_csv_path : str
            CSV with at least 'Symbol', 'Buy Date' (format %m/%d/%y) and
            'Buy Price' (dollar-prefixed string, e.g. '$12.34') columns.
        output_csv_path : str
            Destination CSV for the rule results.
        """
        df = pd.read_csv(input_csv_path)
        # Initialize one result column per sell rule.
        df["Sell Golden Rule"] = False
        df["Sell Standard Profit Goal"] = False
        df["Place Trailing Stop for Decline from Peak Sell Rule"] = False
        df["Hold for 8 Weeks for Certainteed Exception Rule"] = False
        df["Sell for Bad Break Sell Rule"] = False

        for i, stock in df.iterrows():
            symbol = stock["Symbol"]
            print("---------------------------------")
            print(symbol)
            investment_date = datetime.datetime.strptime(
                stock['Buy Date'], '%m/%d/%y')
            # Strip the '$' prefix before converting the buy price.
            avg_share_price = float(stock['Buy Price'].replace("$", ""))
            curr_price = float(finviz.get_stock(symbol)['Price'])

            # Each rule result is written back via a boolean mask on Symbol.
            df.loc[df.Symbol == symbol,
                   'Sell Golden Rule'] = SellRuleChecker.golden_sell_rule(
                       avg_share_price, curr_price=curr_price)
            df.loc[
                df.Symbol == symbol,
                'Sell Standard Profit Goal'] = SellRuleChecker.standard_profit_goal_sell_rule(
                    avg_share_price,
                    curr_price=curr_price,
                    investment_date=investment_date)
            df.loc[
                df.Symbol == symbol,
                'Place Trailing Stop for Decline from Peak Sell Rule'] = SellRuleChecker.decline_from_peak_sell_rule(
                    avg_share_price, curr_price=curr_price)
            df.loc[
                df.Symbol == symbol,
                'Hold for 8 Weeks for Certainteed Exception Rule'] = SellRuleChecker.certainteed_exception_rule(
                    avg_share_price,
                    curr_price=curr_price,
                    investment_date=investment_date)
            df.loc[
                df.Symbol == symbol,
                'Sell for Bad Break Sell Rule'] = SellRuleChecker.bad_break_sell_rule(
                    symbol)

            print("---------------------------------")
            # NOTE(review): the CSV is rewritten on every loop iteration —
            # presumably as a checkpoint in case a finviz lookup fails; confirm.
            df.to_csv(output_csv_path)
Esempio n. 18
0
def UpdatePlaybookPrices(sql_conn, df):
    """Refresh the actual-price column of *df* from finviz and persist the table.

    Iterates the playbook rows, writes the latest finviz price into the
    ``actual_price`` column, reports any tickers that could not be priced,
    then writes the table to the candidates DB table and applies the
    follow-up schema alterations.

    Returns the updated DataFrame.
    """
    notFoundTkt = []
    with tqdm.tqdm(total=len(df), file=sys.stdout) as pbar:
        for idx, row in df.iterrows():
            try:
                df.loc[idx, actual_price] = float(
                    finviz.get_stock(row[tkt])['Price'])
            except Exception:
                # Bug fix: was a bare `except:` (also caught SystemExit /
                # KeyboardInterrupt); the trailing `pass` was dead code.
                print('passed ' + row[tkt])
                notFoundTkt.append(row[tkt])
            pbar.update()
    print('tkt not found in finviz: ' + str(notFoundTkt))
    print('serial output finished')
    dbh.WriteSQLTable(df, sql_conn, kc.db_table_cand)
    dbh.AlterSQLTable(sql_conn, query_change_type_cand)
    dbh.AlterSQLTable(sql_conn, query_add_pkey_cand)
    print('DB output finished')
    return df
Esempio n. 19
0
def stock(update, context):
    """Telegram handler: reply with a finviz summary for a single ticker.

    Expects exactly one ticker in ``context.args``; otherwise replies with
    a usage hint.
    """
    if len(context.args) == 1:
        details = finviz.get_stock(context.args[0])
        wanted = {
            "Company", "Sector", "Industry", "Market Cap", "Price", "P/E",
            "52W Range", "Earnings", "Perf YTD", "Change", "Dividend",
            "Dividend %"
        }
        info_string = "".join(
            f"{field}: {details[field]}\n" for field in details if field in wanted
        )
    else:
        info_string = "I can handle just one ticker at the time."

    update.message.reply_text(info_string)
Esempio n. 20
0
def AddPriceEtfScreen(sql_conn):
    """Add a live finviz price column to the ETF screen table and persist it.

    Reads the ETF table from the DB, fills a 'price' column from finviz,
    reports symbols that could not be priced, then writes the table both to
    the serialized pickle and back to the DB.

    Returns the updated DataFrame.
    """
    df = dbh.ReadSQLTable(sql_conn, kc.db_table_etf)
    # Removing index received from db read
    df.drop('index', axis=1, inplace=True)
    notFoundEtf = []
    with tqdm.tqdm(total=len(df), file=sys.stdout) as pbar:
        for idx, row in df.iterrows():
            try:
                df.loc[idx, 'price'] = float(finviz.get_stock(row['symbol'])['Price'])
            except Exception:
                # Bug fix: was a bare `except:` (also caught SystemExit /
                # KeyboardInterrupt); the trailing `pass` was dead code.
                notFoundEtf.append(row['symbol'])
            pbar.update()
    print('etfs not found in finviz: ' + str(notFoundEtf))
    WriteSerialEtfScreen(df, kc.fileNamePickle, kc.testPath)
    print('serial output finished')
    dbh.WriteSQLTable(df, sql_conn, kc.db_table_etf)
    print('DB output finished')
    return df
Esempio n. 21
0
def quote(request):
    """HTTP Cloud Function returning the finviz snapshot for a ticker.

    Args:
        request (flask.Request): The request object; the ticker is read from
        the JSON body key 't', then the query-string parameter 't', and
        defaults to 'AMZN'.
    Returns:
        A JSON response (via `jsonify`) of the finviz stock data.
    """
    body = request.get_json(silent=True)
    params = request.args

    ticker = 'AMZN'  # default when neither body nor query supplies one
    if body and 't' in body:
        ticker = body['t']
    elif params and 't' in params:
        ticker = params['t']

    return jsonify(finviz.get_stock(ticker))
Esempio n. 22
0
def get_popular_tickers(
    n_top: int, posts_to_look_at: int, subreddits: str = ""
) -> pd.DataFrame:
    """Get popular tickers from list of subreddits [Source: reddit]

    Parameters
    ----------
    n_top : int
        Number of top tickers to get
    posts_to_look_at : int
        How many posts to analyze in each subreddit
    subreddits : str, optional
        String of comma separated subreddits.

    Returns
    -------
    pd.DataFrame
        DataFrame of top tickers from supplied subreddits (empty when no
        tickers were found or on API error).
    """
    if subreddits:
        sub_reddit_list = subreddits.split(",") if "," in subreddits else [subreddits]
    else:
        sub_reddit_list = l_sub_reddits
    d_watchlist_tickers: Dict = {}
    l_watchlist_author = []

    praw_api = praw.Reddit(
        client_id=cfg.API_REDDIT_CLIENT_ID,
        client_secret=cfg.API_REDDIT_CLIENT_SECRET,
        username=cfg.API_REDDIT_USERNAME,
        user_agent=cfg.API_REDDIT_USER_AGENT,
        password=cfg.API_REDDIT_PASSWORD,
    )

    psaw_api = PushshiftAPI()

    for s_sub_reddit in sub_reddit_list:
        console.print(
            f"Search for latest tickers for {posts_to_look_at} '{s_sub_reddit}' posts"
        )
        submissions = psaw_api.search_submissions(
            subreddit=s_sub_reddit,
            limit=posts_to_look_at,
            filter=["id"],
        )

        n_tickers = 0
        for submission in submissions:
            try:
                # Get more information about post using PRAW api
                submission = praw_api.submission(id=submission.id)

                # Ensure that the post hasn't been removed by moderator in the meanwhile,
                # that there is a description and it's not just an image, that the flair is
                # meaningful, and that we aren't re-considering same author's content
                if (
                    not submission.removed_by_category
                    and (submission.selftext or submission.title)
                    and submission.author.name not in l_watchlist_author
                ):
                    l_tickers_found = find_tickers(submission)

                    if l_tickers_found:
                        n_tickers += len(l_tickers_found)

                        # Add another author's name to the parsed watchlists
                        l_watchlist_author.append(submission.author.name)

                        # Lookup stock tickers within a watchlist
                        for key in l_tickers_found:
                            if key in d_watchlist_tickers:
                                # Increment stock ticker found
                                d_watchlist_tickers[key] += 1
                            else:
                                # Initialize stock ticker found
                                d_watchlist_tickers[key] = 1

            except ResponseException as e:
                logger.exception("Invalid response: %s", str(e))

                if "received 401 HTTP response" in str(e):
                    console.print("[red]Invalid API Key[/red]\n")
                else:
                    console.print(f"[red]Invalid response: {str(e)}[/red]\n")

                return pd.DataFrame()

        console.print(f"  {n_tickers} potential tickers found.")
    lt_watchlist_sorted = sorted(
        d_watchlist_tickers.items(), key=lambda item: item[1], reverse=True
    )

    df_columns = [
        "Mentions",
        "Ticker",
        "Company",
        "Sector",
        "Price",
        "Change",
        "Perf Month",
        "URL",
    ]
    # Bug fix: previously `popular_tickers_df` was only assigned inside the
    # `if` below, so an empty result raised UnboundLocalError at the return.
    popular_tickers_df = pd.DataFrame(columns=df_columns)
    if lt_watchlist_sorted:
        n_top_stocks = 0
        # pylint: disable=redefined-outer-name
        popular_tickers = []
        for t_ticker in lt_watchlist_sorted:
            # Bug fix: was `>`, which collected n_top + 1 tickers.
            if n_top_stocks >= n_top:
                break
            try:
                # If try doesn't trigger exception, it means that this stock exists on finviz
                # thus we can print it.
                stock_info = finviz.get_stock(t_ticker[0])
                popular_tickers.append(
                    (
                        t_ticker[1],
                        t_ticker[0],
                        stock_info["Company"],
                        stock_info["Sector"],
                        stock_info["Price"],
                        stock_info["Change"],
                        stock_info["Perf Month"],
                        f"https://finviz.com/quote.ashx?t={t_ticker[0]}",
                    )
                )
                n_top_stocks += 1
            except HTTPError as e:
                if e.response.status_code != 404:
                    logger.exception("Unexpected exception from Finviz: %s", str(e))
                    console.print(f"Unexpected exception from Finviz: {e}")
            except Exception as e:
                logger.exception(str(e))
                console.print(e, "\n")
                # Bug fix: was a bare `return` (None), which broke the
                # declared pd.DataFrame return type for callers.
                return pd.DataFrame()

        popular_tickers_df = pd.DataFrame(popular_tickers, columns=df_columns)
    return popular_tickers_df
Esempio n. 23
0
def spac(l_args):
    """Show recent SPAC-related posts across several investing subreddits.

    Parses ``-l/--limit`` from *l_args*, searches Pushshift for submissions
    whose text mentions SPACs, prints every valid post, and finally lists
    the tickers mentioned more than once (each ticker is validated with a
    finviz lookup before being reported).
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="spac",
        description=""" Show other users SPACs announcement [Reddit] """,
    )
    parser.add_argument(
        "-l",
        "--limit",
        action="store",
        dest="n_limit",
        type=check_positive,
        default=5,
        help="limit of posts with SPACs retrieved.",
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, l_args)
        if not ns_parser:
            return

        praw_api = praw.Reddit(
            client_id=cfg.API_REDDIT_CLIENT_ID,
            client_secret=cfg.API_REDDIT_CLIENT_SECRET,
            username=cfg.API_REDDIT_USERNAME,
            user_agent=cfg.API_REDDIT_USER_AGENT,
            password=cfg.API_REDDIT_PASSWORD,
        )

        d_submission = {}
        d_watchlist_tickers = {}
        l_watchlist_links = list()
        l_watchlist_author = list()

        # n_ts_after = int(
        #    (datetime.today() - timedelta(days=ns_parser.n_days)).timestamp()
        # )
        l_sub_reddits = [
            "pennystocks",
            "RobinHoodPennyStocks",
            "Daytrading",
            "StockMarket",
            "stocks",
            "investing",
            "wallstreetbets",
        ]

        warnings.filterwarnings("ignore")  # To avoid printing the warning
        psaw_api = PushshiftAPI()
        # The query enumerates case variants of "spac" explicitly.
        submissions = psaw_api.search_submissions(
            # after=n_ts_after,
            subreddit=l_sub_reddits,
            q="SPAC|Spac|spac|Spacs|spacs",
            filter=["id"],
        )
        n_flair_posts_found = 0
        while True:
            try:
                submission = next(submissions, None)
                if submission:
                    # Get more information about post using PRAW api
                    submission = praw_api.submission(id=submission.id)

                    # Ensure that the post hasn't been removed  by moderator in the meanwhile,
                    # that there is a description and it's not just an image, that the flair is
                    # meaningful, and that we aren't re-considering same author's watchlist
                    if (
                        not submission.removed_by_category
                        and submission.selftext
                        and submission.link_flair_text not in ["Yolo", "Meme"]
                        and submission.author.name not in l_watchlist_author
                    ):
                        l_tickers_found = find_tickers(submission)

                        if l_tickers_found:
                            # Add another author's name to the parsed watchlists
                            l_watchlist_author.append(submission.author.name)

                            # Lookup stock tickers within a watchlist
                            for key in l_tickers_found:
                                if key in d_watchlist_tickers:
                                    # Increment stock ticker found
                                    d_watchlist_tickers[key] += 1
                                else:
                                    # Initialize stock ticker found
                                    d_watchlist_tickers[key] = 1

                            l_watchlist_links.append(
                                f"https://old.reddit.com{submission.permalink}"
                            )

                            print_and_record_reddit_post(d_submission, submission)

                            # Increment count of valid posts found
                            n_flair_posts_found += 1

                    # Check if number of wanted posts found has been reached
                    if n_flair_posts_found > ns_parser.n_limit - 1:
                        break

                # Check if search_submissions didn't get anymore posts
                else:
                    break
            except ResponseException:
                print(
                    "Received a response from Reddit with an authorization error. check your token.\n"
                )
                return

        if n_flair_posts_found:
            lt_watchlist_sorted = sorted(
                d_watchlist_tickers.items(), key=lambda item: item[1], reverse=True
            )
            s_watchlist_tickers = ""
            n_tickers = 0
            for t_ticker in lt_watchlist_sorted:
                try:
                    # If try doesn't trigger exception, it means that this stock exists on finviz
                    # thus we can print it.
                    finviz.get_stock(t_ticker[0])
                    if int(t_ticker[1]) > 1:
                        s_watchlist_tickers += f"{t_ticker[1]} {t_ticker[0]}, "
                    n_tickers += 1
                except Exception:
                    # print(e, "\n")
                    pass
            if n_tickers:
                print(
                    "The following stock tickers have been mentioned more than once across the previous SPACs:"
                )
                # [:-2] strips the trailing ", " separator.
                print(s_watchlist_tickers[:-2])
        print("")

    except Exception as e:
        print(e, "\n")
Esempio n. 24
0
def spac_community(l_args):
    """Print other users' SPAC announcements from the 'SPACs' subreddit.

    Iterates over the subreddit's 'hot' or 'new' posts (chosen by the
    -p/--popular flag), prints every valid post, tallies the stock tickers
    found across them, and finally prints the tickers mentioned more than
    once — keeping only those that finviz recognizes.

    Parameters
    ----------
    l_args : list
        Command-line style arguments for the argparse parser
        (-l/--limit, -p/--popular).
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="spac_c",
        description="""Print other users SPACs announcement under subreddit 'SPACs' [Source: Reddit]""",
    )
    parser.add_argument(
        "-l",
        "--limit",
        action="store",
        dest="n_limit",
        type=check_positive,
        default=10,
        help="limit of posts with SPACs retrieved",
    )
    parser.add_argument(
        "-p",
        "--popular",
        action="store_true",
        default=False,
        dest="b_popular",
        help="popular flag, if true the posts retrieved are based on score rather than time",
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, l_args)
        if not ns_parser:
            return

        # Authenticated Reddit client; credentials come from the project config.
        praw_api = praw.Reddit(
            client_id=cfg.API_REDDIT_CLIENT_ID,
            client_secret=cfg.API_REDDIT_CLIENT_SECRET,
            username=cfg.API_REDDIT_USERNAME,
            user_agent=cfg.API_REDDIT_USER_AGENT,
            password=cfg.API_REDDIT_PASSWORD,
        )

        d_submission = {}          # per-post metadata, keyed by submission id
        d_watchlist_tickers = {}   # ticker -> mention count across all posts
        l_watchlist_links = list()
        l_watchlist_author = list()

        # psaw_api = PushshiftAPI()

        # Score-ranked ('hot') vs chronological ('new') post stream.
        if ns_parser.b_popular:
            submissions = praw_api.subreddit("SPACs").hot(limit=ns_parser.n_limit)
        else:
            submissions = praw_api.subreddit("SPACs").new(limit=ns_parser.n_limit)

        while True:
            try:
                # The generator is already capped at n_limit posts; None marks
                # exhaustion.
                submission = next(submissions, None)
                if submission:
                    # Get more information about post using PRAW api
                    submission = praw_api.submission(id=submission.id)

                    # Ensure that the post hasn't been removed  by moderator in the meanwhile,
                    # that there is a description and it's not just an image, that the flair is
                    # meaningful, and that we aren't re-considering same author's watchlist
                    if (
                        not submission.removed_by_category
                        and submission.selftext
                        and submission.link_flair_text not in ["Yolo", "Meme"]
                        and submission.author.name not in l_watchlist_author
                    ):
                        l_tickers_found = find_tickers(submission)

                        if l_tickers_found:
                            # Add another author's name to the parsed watchlists
                            l_watchlist_author.append(submission.author.name)

                            # Lookup stock tickers within a watchlist
                            for key in l_tickers_found:
                                if key in d_watchlist_tickers:
                                    # Increment stock ticker found
                                    d_watchlist_tickers[key] += 1
                                else:
                                    # Initialize stock ticker found
                                    d_watchlist_tickers[key] = 1

                            l_watchlist_links.append(
                                f"https://old.reddit.com{submission.permalink}"
                            )

                            print_and_record_reddit_post(d_submission, submission)

                # Check if search_submissions didn't get anymore posts
                else:
                    break
            except ResponseException:
                print(
                    "Received a response from Reddit with an authorization error. check your token.\n"
                )
                return

        if d_watchlist_tickers:
            # Most-mentioned tickers first.
            lt_watchlist_sorted = sorted(
                d_watchlist_tickers.items(), key=lambda item: item[1], reverse=True
            )
            s_watchlist_tickers = ""
            n_tickers = 0
            for t_ticker in lt_watchlist_sorted:
                try:
                    # If try doesn't trigger exception, it means that this stock exists on finviz
                    # thus we can print it.
                    finviz.get_stock(t_ticker[0])
                    if int(t_ticker[1]) > 1:
                        s_watchlist_tickers += f"{t_ticker[1]} {t_ticker[0]}, "
                    n_tickers += 1
                except Exception:
                    # print(e, "\n")
                    pass

            if n_tickers:
                print(
                    "The following stock tickers have been mentioned more than once across the previous SPACs:"
                )
                # Drop the trailing ", " separator.
                print(s_watchlist_tickers[:-2])
        print("")

    except Exception as e:
        print(e, "\n")
Esempio n. 25
0
def popular_tickers(l_args):
    """Print the most-mentioned stock tickers across finance subreddits.

    Scans the latest posts of the selected subreddits (via Pushshift + PRAW),
    counts ticker mentions, validates each candidate against finviz, and
    prints the top -n/--number tickers as a DataFrame.

    Parameters
    ----------
    l_args : list
        Command-line style arguments for the argparse parser
        (-n/--number, -l/--limit, -s/--sub).
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="popular",
        description="""Print latest popular tickers. [Source: Reddit] """,
    )
    parser.add_argument(
        "-n",
        "--number",
        action="store",
        dest="n_top",
        type=check_positive,
        default=10,
        help="display top N tickers",
    )
    parser.add_argument(
        "-l",
        "--limit",
        action="store",
        dest="n_limit",
        type=check_positive,
        default=50,
        help="limit of posts retrieved per sub reddit.",
    )
    parser.add_argument(
        "-s",
        "--sub",
        action="store",
        dest="s_subreddit",
        type=str,
        help="""
            subreddits to look for tickers, e.g. pennystocks,stocks.
            Default: pennystocks, RobinHoodPennyStocks, Daytrading, StockMarket, stocks, investing,
            wallstreetbets
        """,
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, l_args)
        if not ns_parser:
            return

        # n_ts_after = int(
        #    (datetime.today() - timedelta(days=ns_parser.n_days)).timestamp()
        # )

        # Either a user-supplied (comma-separated) subreddit list or the
        # built-in default set of finance subreddits.
        if ns_parser.s_subreddit:
            if "," in ns_parser.s_subreddit:
                l_sub_reddits = ns_parser.s_subreddit.split(",")
            else:
                l_sub_reddits = [ns_parser.s_subreddit]
        else:
            l_sub_reddits = [
                "pennystocks",
                "RobinHoodPennyStocks",
                "Daytrading",
                "StockMarket",
                "stocks",
                "investing",
                "wallstreetbets",
            ]

        # d_submission = {}
        d_watchlist_tickers = {}   # ticker -> mention count across all subreddits
        # l_watchlist_links = list()
        l_watchlist_author = list()

        praw_api = praw.Reddit(
            client_id=cfg.API_REDDIT_CLIENT_ID,
            client_secret=cfg.API_REDDIT_CLIENT_SECRET,
            username=cfg.API_REDDIT_USERNAME,
            user_agent=cfg.API_REDDIT_USER_AGENT,
            password=cfg.API_REDDIT_PASSWORD,
        )

        psaw_api = PushshiftAPI()

        for s_sub_reddit in l_sub_reddits:
            print(
                f"Search for latest tickers under {ns_parser.n_limit} '{s_sub_reddit}' posts"
            )
            # Pushshift only returns ids; each post is then re-fetched
            # through PRAW for full, current metadata.
            submissions = psaw_api.search_submissions(
                # after=int(n_ts_after),
                subreddit=s_sub_reddit,
                limit=ns_parser.n_limit,
                filter=["id"],
            )

            n_tickers = 0
            while True:
                try:
                    submission = next(submissions, None)
                    if submission:
                        # Get more information about post using PRAW api
                        submission = praw_api.submission(id=submission.id)

                        # Ensure that the post hasn't been removed by moderator in the meanwhile,
                        # that there is a description and it's not just an image, that the flair is
                        # meaningful, and that we aren't re-considering same author's content
                        if (
                            not submission.removed_by_category
                            and (submission.selftext or submission.title)
                            and submission.author.name not in l_watchlist_author
                        ):
                            l_tickers_found = find_tickers(submission)

                            if l_tickers_found:
                                n_tickers += len(l_tickers_found)

                                # Add another author's name to the parsed watchlists
                                l_watchlist_author.append(submission.author.name)

                                # Lookup stock tickers within a watchlist
                                for key in l_tickers_found:
                                    if key in d_watchlist_tickers:
                                        # Increment stock ticker found
                                        d_watchlist_tickers[key] += 1
                                    else:
                                        # Initialize stock ticker found
                                        d_watchlist_tickers[key] = 1

                    # Check if search_submissions didn't get anymore posts
                    else:
                        break
                except ResponseException:
                    print(
                        "Received a response from Reddit with an authorization error. check your token.\n"
                    )
                    return

            print(f"  {n_tickers} potential tickers found.")

        # Most-mentioned tickers first.
        lt_watchlist_sorted = sorted(
            d_watchlist_tickers.items(), key=lambda item: item[1], reverse=True
        )

        if lt_watchlist_sorted:
            n_top_stocks = 0
            # pylint: disable=redefined-outer-name
            popular_tickers = []
            for t_ticker in lt_watchlist_sorted:
                # BUGFIX: was `> ns_parser.n_top`, which let n_top + 1 tickers
                # through before breaking, contradicting the "TOP n" header.
                if n_top_stocks >= ns_parser.n_top:
                    break
                try:
                    # If try doesn't trigger exception, it means that this stock exists on finviz
                    # thus we can print it.
                    stock_info = finviz.get_stock(t_ticker[0])
                    popular_tickers.append(
                        (
                            t_ticker[1],
                            t_ticker[0],
                            stock_info["Company"],
                            stock_info["Sector"],
                            stock_info["Price"],
                            stock_info["Change"],
                            stock_info["Perf Month"],
                            f"https://finviz.com/quote.ashx?t={t_ticker[0]}",
                        )
                    )
                    n_top_stocks += 1
                except HTTPError as e:
                    # A 404 simply means finviz doesn't know the symbol; any
                    # other status code is worth reporting.
                    if e.response.status_code != 404:
                        print(f"Unexpected exception from Finviz: {e}")
                except Exception as e:
                    print(e, "\n")
                    return

            popular_tickers_df = pd.DataFrame(
                popular_tickers,
                columns=[
                    "Mentions",
                    "Ticker",
                    "Company",
                    "Sector",
                    "Price",
                    "Change",
                    "Perf Month",
                    "URL",
                ],
            )

            print(f"\nThe following TOP {ns_parser.n_top} tickers have been mentioned:")

            print(popular_tickers_df, "\n")
        else:
            print("No tickers found")

        print("")

    except ResponseException:
        print(
            "Received a response from Reddit with an authorization error. check your token.\n"
        )
        return

    except Exception as e:
        print(e, "\n")
Esempio n. 26
0
# Scan the S&P 500 constituents and collect tickers whose current price sits
# high inside the 52-week range (finviz snapshot per ticker).
payload = pd.read_html(
    'https://en.wikipedia.org/wiki/List_of_S%26P_500_companies')
first_table = payload[0]
second_table = payload[1]

df = first_table

symbols = df['Symbol'].values.tolist()

#print(symbols)

found_tickers = []

for ticker in symbols:
    print("Get data for ticker - ", ticker)
    # Fetch the finviz snapshot ONCE per ticker (the original fetched it
    # twice — one network round-trip for 'Price' and another for '52W Range').
    # Wikipedia writes class shares as e.g. "BRK.B"; finviz expects "BRK-B".
    stock = finviz.get_stock(ticker.replace(".", "-"))
    curprice = float(stock['Price'])
    range52 = stock['52W Range']
    low52 = float(range52.split('-')[0].strip())
    high52 = float(range52.split('-')[1].strip())
    try:
        # Ratio of the full 52W range to the distance below the 52W high;
        # grows as the price approaches the high. Division by zero occurs
        # when the price equals the 52W high.
        ticker_radar = round((high52 - low52) / (high52 - curprice))
    except ZeroDivisionError:
        ticker_radar = 0
    if (ticker_radar > 4) and (ticker_radar < 30):
        try:
            # maxprice is expected to be defined earlier in the file; if it
            # isn't, the NameError fallback keeps the ticker anyway.
            if curprice > maxprice:
                print("Skip, Ticker by price - ", ticker)
            else:
                found_tickers.append(ticker)
        except NameError:
            found_tickers.append(ticker)
Esempio n. 27
0
def lookup(stock):
    """Fetch the finviz snapshot for *stock* and append it to the shared
    module-level ``stock_data`` list."""
    snapshot = finviz.get_stock(stock)
    stock_data.append(snapshot)
Esempio n. 28
0
def spac(l_args):
    """Show other users' SPAC announcements gathered from Reddit.

    Searches several finance subreddits (via Pushshift) for SPAC-related
    posts from the last -d/--days days, prints each valid post, and finally
    lists the tickers mentioned more than once — keeping only those that
    finviz recognizes.

    Parameters
    ----------
    l_args : list
        Command-line style arguments for the argparse parser
        (-l/--limit, -d/--days).
    """
    parser = argparse.ArgumentParser(
        prog='spac',
        description=""" Show other users SPACs announcement [Reddit] """)
    parser.add_argument('-l',
                        "--limit",
                        action="store",
                        dest="n_limit",
                        type=check_positive,
                        default=5,
                        help='limit of posts with SPACs retrieved.')
    parser.add_argument('-d',
                        "--days",
                        action="store",
                        dest="n_days",
                        type=check_positive,
                        default=5,
                        help="look for the tickers from those n past days.")

    try:
        (ns_parser, l_unknown_args) = parser.parse_known_args(l_args)

        if l_unknown_args:
            print(
                f"The following args couldn't be interpreted: {l_unknown_args}\n"
            )
            return

        praw_api = praw.Reddit(client_id=cfg.API_REDDIT_CLIENT_ID,
                               client_secret=cfg.API_REDDIT_CLIENT_SECRET,
                               username=cfg.API_REDDIT_USERNAME,
                               user_agent=cfg.API_REDDIT_USER_AGENT,
                               password=cfg.API_REDDIT_PASSWORD)

        d_submission = {}          # per-post metadata, keyed by submission id
        d_watchlist_tickers = {}   # ticker -> mention count across all posts
        l_watchlist_links = list()
        l_watchlist_author = list()

        # Only consider posts newer than this UNIX timestamp.
        n_ts_after = int(
            (datetime.today() - timedelta(days=ns_parser.n_days)).timestamp())
        l_sub_reddits = [
            'pennystocks', 'RobinHoodPennyStocks', 'Daytrading', 'StockMarket',
            'stocks', 'investing', 'wallstreetbets'
        ]

        warnings.filterwarnings("ignore")  # To avoid printing the warning
        psaw_api = PushshiftAPI()
        submissions = psaw_api.search_submissions(
            after=n_ts_after,
            subreddit=l_sub_reddits,
            q='SPAC|Spac|spac|Spacs|spacs',
            filter=['id'])
        n_flair_posts_found = 0
        while True:
            submission = next(submissions, None)
            if submission:
                # Get more information about post using PRAW api
                submission = praw_api.submission(id=submission.id)

                # Ensure that the post hasn't been removed  by moderator in the meanwhile,
                # that there is a description and it's not just an image, that the flair is
                # meaningful, and that we aren't re-considering same author's watchlist
                if not submission.removed_by_category and submission.selftext \
                    and submission.link_flair_text not in ['Yolo', 'Meme'] \
                    and submission.author.name not in l_watchlist_author:

                    # Gather all text: body, title and every comment.
                    ls_text = list()
                    ls_text.append(submission.selftext)
                    ls_text.append(submission.title)

                    submission.comments.replace_more(limit=0)
                    for comment in submission.comments.list():
                        ls_text.append(comment.body)

                    # Candidate tickers: 3-5 uppercase letters followed by a
                    # space (the trailing space is stripped off).
                    l_tickers_found = list()
                    for s_text in ls_text:
                        for s_ticker in set(
                                re.findall(r'([A-Z]{3,5} )', s_text)):
                            l_tickers_found.append(s_ticker.strip())

                    if l_tickers_found:
                        # Add another author's name to the parsed watchlists
                        l_watchlist_author.append(submission.author.name)

                        # Lookup stock tickers within a watchlist
                        for key in l_tickers_found:
                            if key in d_watchlist_tickers:
                                # Increment stock ticker found
                                d_watchlist_tickers[key] += 1
                            else:
                                # Initialize stock ticker found
                                d_watchlist_tickers[key] = 1

                        l_watchlist_links.append(
                            f"https://www.reddit.com{submission.permalink}")
                        # delete below, not necessary I reckon. Probably just link?

                        # Refactor data
                        s_datetime = datetime.utcfromtimestamp(
                            submission.created_utc).strftime(
                                "%d/%m/%Y %H:%M:%S")
                        s_link = f"https://www.reddit.com{submission.permalink}"
                        s_all_awards = ""
                        for award in submission.all_awardings:
                            s_all_awards += f"{award['count']} {award['name']}\n"
                        s_all_awards = s_all_awards[:-2]

                        # Create dictionary with data to construct dataframe allows to save data
                        d_submission[submission.id] = {
                            'created_utc': s_datetime,
                            'subreddit': submission.subreddit,
                            'link_flair_text': submission.link_flair_text,
                            'title': submission.title,
                            'score': submission.score,
                            'link': s_link,
                            'num_comments': submission.num_comments,
                            'upvote_ratio': submission.upvote_ratio,
                            'awards': s_all_awards
                        }

                        # Print post data collected so far
                        print(f"{s_datetime} - {submission.title}")
                        print(f"{s_link}")
                        t_post = PrettyTable([
                            'Subreddit', 'Flair', 'Score', '# Comments',
                            'Upvote %', "Awards"
                        ])
                        t_post.add_row([
                            submission.subreddit, submission.link_flair_text,
                            submission.score, submission.num_comments,
                            f"{round(100*submission.upvote_ratio)}%",
                            s_all_awards
                        ])
                        print(t_post)
                        print("\n")

                        # Increment count of valid posts found
                        n_flair_posts_found += 1

                # Check if number of wanted posts found has been reached
                if n_flair_posts_found > ns_parser.n_limit - 1:
                    break

            # Check if search_submissions didn't get anymore posts
            else:
                break

        if n_flair_posts_found:
            lt_watchlist_sorted = sorted(d_watchlist_tickers.items(),
                                         key=lambda item: item[1],
                                         reverse=True)
            s_watchlist_tickers = ""
            n_tickers = 0
            for t_ticker in lt_watchlist_sorted:
                try:
                    # If try doesn't trigger exception, it means that this stock exists on finviz
                    # thus we can print it.
                    finviz.get_stock(t_ticker[0])
                    if int(t_ticker[1]) > 1:
                        s_watchlist_tickers += f"{t_ticker[1]} {t_ticker[0]}, "
                    n_tickers += 1
                # BUGFIX: was a bare `except:`, which also swallows
                # KeyboardInterrupt / SystemExit.
                except Exception:
                    pass
            if n_tickers:
                print(
                    "The following stock tickers have been mentioned more than once across the previous SPACs:"
                )
                # Drop the trailing ", " separator.
                print(s_watchlist_tickers[:-2])
        print("")

    # BUGFIX: was a bare `except:` — narrowed so Ctrl-C still exits.
    except Exception:
        print("")
Esempio n. 29
0
def popular_tickers(l_args):
    """Print the latest popular tickers mentioned across finance subreddits.

    Scans posts of the selected subreddits from the last -d/--days days
    (via Pushshift + PRAW), counts ticker mentions, and prints the ten
    most-mentioned tickers that finviz recognizes.

    Parameters
    ----------
    l_args : list
        Command-line style arguments for the argparse parser
        (-l/--limit, -s/--sub, -d/--days).
    """
    parser = argparse.ArgumentParser(
        prog='popular',
        description="""Print latest popular tickers. [Source: Reddit] """)
    parser.add_argument('-l',
                        "--limit",
                        action="store",
                        dest="n_limit",
                        type=check_positive,
                        default=50,
                        help='limit of posts retrieved per sub reddit.')
    parser.add_argument(
        '-s',
        "--sub",
        action="store",
        dest="s_subreddit",
        type=str,
        help="""subreddits to look for tickers, e.g. pennystocks,stocks.
                        Default: pennystocks, RobinHoodPennyStocks, Daytrading, StockMarket, stocks, investing, wallstreetbets"""
    )
    parser.add_argument('-d',
                        "--days",
                        action="store",
                        dest="n_days",
                        type=check_positive,
                        default=1,
                        help="look for the tickers from those n past days.")

    try:
        (ns_parser, l_unknown_args) = parser.parse_known_args(l_args)

        if l_unknown_args:
            print(
                f"The following args couldn't be interpreted: {l_unknown_args}\n"
            )
            return

        # Only consider posts newer than this UNIX timestamp.
        n_ts_after = int(
            (datetime.today() - timedelta(days=ns_parser.n_days)).timestamp())

        # Either a user-supplied (comma-separated) subreddit list or the
        # built-in default set of finance subreddits.
        if ns_parser.s_subreddit:
            if ',' in ns_parser.s_subreddit:
                l_sub_reddits = ns_parser.s_subreddit.split(',')
            else:
                l_sub_reddits = [ns_parser.s_subreddit]
        else:
            l_sub_reddits = [
                'pennystocks', 'RobinHoodPennyStocks', 'Daytrading',
                'StockMarket', 'stocks', 'investing', 'wallstreetbets'
            ]

        d_submission = {}
        d_watchlist_tickers = {}   # ticker -> mention count across all subreddits
        l_watchlist_links = list()
        l_watchlist_author = list()

        praw_api = praw.Reddit(client_id=cfg.API_REDDIT_CLIENT_ID,
                               client_secret=cfg.API_REDDIT_CLIENT_SECRET,
                               username=cfg.API_REDDIT_USERNAME,
                               user_agent=cfg.API_REDDIT_USER_AGENT,
                               password=cfg.API_REDDIT_PASSWORD)

        psaw_api = PushshiftAPI()

        for s_sub_reddit in l_sub_reddits:
            print(
                f"Search for latest tickers under {ns_parser.n_limit} '{s_sub_reddit}' posts"
            )
            # Pushshift only returns ids; each post is then re-fetched
            # through PRAW for full, current metadata.
            submissions = psaw_api.search_submissions(after=int(n_ts_after),
                                                      subreddit=s_sub_reddit,
                                                      limit=ns_parser.n_limit,
                                                      filter=['id'])

            n_tickers = 0
            while True:
                submission = next(submissions, None)
                if submission:
                    # Get more information about post using PRAW api
                    submission = praw_api.submission(id=submission.id)

                    # Ensure that the post hasn't been removed by moderator in the meanwhile,
                    # that there is a description and it's not just an image, that the flair is
                    # meaningful, and that we aren't re-considering same author's content
                    if not submission.removed_by_category and (submission.selftext or submission.title) \
                        and submission.author.name not in l_watchlist_author:
                        # Gather all text: body, title and every comment.
                        ls_text = list()
                        ls_text.append(submission.selftext)
                        ls_text.append(submission.title)

                        submission.comments.replace_more(limit=0)
                        for comment in submission.comments.list():
                            ls_text.append(comment.body)

                        # Candidate tickers: 3-5 uppercase letters followed by
                        # a space (the trailing space is stripped off).
                        l_tickers_found = list()
                        for s_text in ls_text:
                            for s_ticker in set(
                                    re.findall(r'([A-Z]{3,5} )', s_text)):
                                l_tickers_found.append(s_ticker.strip())

                        if l_tickers_found:
                            n_tickers += len(l_tickers_found)

                            # Add another author's name to the parsed watchlists
                            l_watchlist_author.append(submission.author.name)

                            # Lookup stock tickers within a watchlist
                            for key in l_tickers_found:
                                if key in d_watchlist_tickers:
                                    # Increment stock ticker found
                                    d_watchlist_tickers[key] += 1
                                else:
                                    # Initialize stock ticker found
                                    d_watchlist_tickers[key] = 1

                # Check if search_submissions didn't get anymore posts
                else:
                    break

            print(f"  {n_tickers} tickers found.")

        # Most-mentioned tickers first.
        lt_watchlist_sorted = sorted(d_watchlist_tickers.items(),
                                     key=lambda item: item[1],
                                     reverse=True)
        if lt_watchlist_sorted:
            print(
                f"\nThe following TOP10 tickers have been mentioned in the last {ns_parser.n_days} days:"
            )
            n_top_stocks = 0
            for t_ticker in lt_watchlist_sorted:
                if n_top_stocks > 9:
                    break
                try:
                    # If try doesn't trigger exception, it means that this stock exists on finviz
                    # thus we can print it.
                    finviz.get_stock(t_ticker[0])
                    print(f"{t_ticker[1]} {t_ticker[0]}")
                    n_top_stocks += 1
                # BUGFIX: was a bare `except:`, which also swallows
                # KeyboardInterrupt / SystemExit.
                except Exception:
                    pass
        else:
            print("No tickers found")
        print("")

    # BUGFIX: was a bare `except:` — narrowed so Ctrl-C still exits.
    except Exception:
        print("")
Esempio n. 30
0
    return response.json()


# Load the watch-list: one ticker symbol per line of the file named by
# `csvfile` (defined earlier in the file).
with open(csvfile, "r") as a_file:
    for line in a_file:
        stripped_line = line.strip()
        tickers.append(stripped_line)

# Announce startup over Telegram, then initialize the previous-poll price
# map used by the monitoring loop below to compute percentage moves.
telegram_bot_sendtext("Bot started", bot_token, bot_chatID)
olddata = dict()
while True:
    data = dict()
    for ticker in tickers:
        print("Processing ticker: ", ticker)
        current_price = float((finviz.get_stock(ticker.replace(".",
                                                               "-"))['Price']))
        data.update({ticker: current_price})
        if not olddata:
            n = ''
        else:
            key_list = list(olddata.keys())
            val_list = list(olddata.values())
            position = key_list.index(ticker)
            prevprice = val_list[position]
            percent = round(float((prevprice / current_price) * 100 - 100))
            if percent > ctrigger:
                print("Current price: ", current_price, "Previos price: ",
                      prevprice, "Change %:", percent)
                telegram_bot_sendtext(
                    ("Current price: " + current_price + " Previos price: " +
                     prevprice + " Change %:" + percent), bot_token,