# --- Sentiment-parsing demo (script fragment) ---
# NOTE(review): `Sentiment`, `nlp`, and `ALPACA_REST` are defined elsewhere in
# the project; this chunk is not runnable on its own — confirm against the
# original file this fragment came from.
s=Sentiment()
debug=False
# create sentiment analyzer
from nltk.sentiment.vader import SentimentIntensityAnalyzer
sid = SentimentIntensityAnalyzer()
import statsmodels.api as sm1
from statsmodels.sandbox.regression.predstd import wls_prediction_std
# univariate stacked lstm example
from numpy import array
from keras.models import Sequential
from keras.layers import LSTM
from keras.layers import Dense
from keras.utils import plot_model
api = ALPACA_REST()
# NOTE(review): each uncommented reassignment below overrides the previous
# one; only the final headline ('Bassett Furniture ...') is actually parsed.
inputTxt='Honest Company reports Q1 EPS (13c) vs. 1c last year'
inputTxt='Lennar reports Q2 adjusted EPS $2.95, consensus $2.36'
inputTxt='Cognyte reports Q1 EPS (20c), consensus (15c)'
inputTxt='Brookdale Senior Living resumed with a Buy at Stifel'
inputTxt='Anglo American price target raised to 3,670 GBp from 3,500 GBp at Morgan Stanley'
#inputTxt='GMS Inc. reports Q4 adjusted EPS $1.07, consensus 82c'
#inputTxt='CalAmp reports Q1 adjusted EPS 8c, consensus 7c'
#inputTxt='Adagene initiated with a Buy at China Renaissance'
#inputTxt='Molecular Partners indicated to open at $20, IPO priced at $21.25'
#inputTxt='WalkMe indicated to open at $33.20, IPO priced at $31'
inputTxt='Bassett Furniture reports Q2 EPS 60c, two est. 35c'
s.Parse(inputTxt,'Honest Company', 'HON', sid=sid, nlp=nlp, is_earnings=True)
#s.Sentiment(sid=sid,nlp=nlp,is_earnings=is_earnings)
print(s)
# --- Alpaca positions / portfolio-history report (script fragment) ---
import alpaca_trade_api as tradeapi
import os
from ReadData import ALPACA_REST
#from alpaca_trade_api.rest import REST
#api = tradeapi.REST()
#export APCA_API_BASE_URL='https://paper-api.alpaca.markets'
#export APCA_API_BASE_URL='https://api.alpaca.markets' # for live markets
#ALPACA_ID = os.getenv('ALPACA_ID')
#ALPACA_PAPER_KEY = os.getenv('ALPACA_PAPER_KEY')
#ALPHA_ID = os.getenv('ALPHA_ID')
api = ALPACA_REST()
#paper-api.alpaca.markets
ticker = 'X'
# NOTE(review): overrides the 'X' assignment above; only 'TSLA' is used.
ticker = 'TSLA'
# Get our position in AAPL.
#aapl_position = api.get_position(ticker)
# Get a list of all of our positions.
portfolio = api.list_positions()
#print(portfolio)
# Print the quantity of shares for each position.
for position in portfolio:
    print("{} shares of {} market: {} cost_basis: {}".format(
        position.qty, position.symbol, position.market_value,
        position.cost_basis))
# Get a list of all of our history.
# NOTE(review): the call below is truncated at the chunk boundary — the
# remaining arguments and closing paren are not visible here.
hist = api.get_portfolio_history(date_start='2021-07-01',
def GenerateSignal(ticker, out_file_name='out_bull_instructions.csv', price_targets=None):
    """Collect fundamentals and price history for `ticker` and write a signal CSV.

    Gathers earnings, annual balance-sheet, and quarterly income-statement
    data, loads daily prices for the ticker and SPY, downloads the last
    week of minute bars (with retries), then builds an `Earnings` object
    and writes its CSV summary.

    Parameters
    ----------
    ticker : str
        Stock symbol to process.
    out_file_name : str
        Destination CSV passed to `Earnings.WriteCSV`.
    price_targets : list or None
        Optional price targets forwarded to `Earnings.WriteCSV`.
        (Fixed: was a mutable default argument `[]`, which is shared
        across calls; `None` sentinel is backward-compatible.)
    """
    if price_targets is None:
        price_targets = []

    connectionCal = SQL_CURSOR('earningsCalendarv2.db')
    fd = ALPHA_FundamentalData()
    sqlcursor = SQL_CURSOR()
    ts = ALPHA_TIMESERIES()
    api = ALPACA_REST()
    stockInfoQuarter, stockInfoAnnual, company_overview = CollectEarnings(
        ticker, connectionCal)

    # annual balance sheet
    balance_sheet_annual = fd.get_balance_sheet_annual(ticker)[0]
    balance_sheet_annual['fiscalDateEnding'] = pd.to_datetime(
        balance_sheet_annual['fiscalDateEnding'], errors='coerce')
    for d in balance_sheet_annual.columns:
        if d not in ['fiscalDateEnding', 'totalAssets']:
            balance_sheet_annual[d] = pd.to_numeric(balance_sheet_annual[d],
                                                    errors='coerce')

    # quarterly income statement
    income_statement_quarterly = fd.get_income_statement_quarterly(ticker)[0]
    for d in income_statement_quarterly.columns:
        if d not in ['fiscalDateEnding', 'reportedCurrency']:
            income_statement_quarterly[d] = pd.to_numeric(
                income_statement_quarterly[d], errors='coerce')
    for d in ['fiscalDateEnding']:
        income_statement_quarterly[d] = pd.to_datetime(
            income_statement_quarterly[d], errors='coerce')
    if debug:  # NOTE(review): `debug` is a module-level flag defined elsewhere
        print(income_statement_quarterly)
        print(income_statement_quarterly.dtypes)

    tstock_info, j = ConfigTable(ticker, sqlcursor, ts, 'full')
    spy, j = ConfigTable('SPY', sqlcursor, ts, 'compact')
    #print(spy.columns)

    # Build the Alpaca time window: "now minus 40 minutes" back to 7 days ago,
    # formatted with an explicit -04:00 (US/Eastern DST) offset.
    est = pytz.timezone('US/Eastern')
    today = datetime.now(tz=est) + maindatetime.timedelta(minutes=-40)
    #today = datetime.utcnow() + maindatetime.timedelta(minutes=-30)
    d1 = today.strftime("%Y-%m-%dT%H:%M:%S-04:00")
    five_days = (
        today + maindatetime.timedelta(days=-7)).strftime("%Y-%m-%dT%H:%M:%S-04:00")

    # Download minute bars, retrying up to 3 times on connection errors;
    # minute_prices stays [] if every attempt fails.
    minute_prices = []
    ntry = 0
    while ntry < 3:
        try:
            minute_prices = runTicker(api, ticker, timeframe=TimeFrame.Minute,
                                      start=five_days, end=d1)
            break
        except (requests.exceptions.ConnectionError):
            ntry += 1

    # may want to restrict to NYSE open times
    try:
        spy = AddInfo(spy, spy)
        tstock_info = AddInfo(tstock_info, spy, AddSupport=True)
    except (ValueError, KeyError):
        # Best-effort: indicator enrichment failures are reported, not fatal.
        print('Error processing adding info %s' % ticker)

    recent_quotes = getQuotes(api, ticker)
    if debug:
        print(tstock_info[[
            'adj_close', 'sma20', 'sma20cen', 'vwap10cen', 'vwap10'
        ]][50:-10])

    earn = Earnings(ticker, income_statement_quarterly, company_overview,
                    balance_sheet_annual, stockInfoQuarter, stockInfoAnnual,
                    tstock_info, minute_prices, recent_quotes)
    #earn.BuildPDF()
    earn.WriteCSV(out_file_name, price_targets)
#!/usr/bin/python import requests import time,os,sys from ta.trend import macd import numpy as np from datetime import datetime, timedelta from pytz import timezone import pandas as pd from ReadData import ALPACA_REST,ALPACA_STREAMCONN,runTicker from alpaca_trade_api.rest import TimeFrame from watchdog.observers import Observer from file_change_monitor import MyFileHandler # Load API api = ALPACA_REST() STOCK_DB_PATH = os.getenv('STOCK_DB_PATH') global _out_tickers session = requests.session() # We only consider stocks with per-share prices inside this range min_share_price = 2.0 max_share_price = 70.0 # Minimum previous-day dollar volume for a stock we might consider min_last_dv = 500000 # Stop limit to default to default_stop = .95 # How much of our portfolio to allocate to any one position risk = 0.001
# NOTE(review): this chunk begins mid-function — the enclosing `def` and the
# start of the `out_df.append(...)` call are not visible here; the
# reconstructed indentation below is a best guess and must be confirmed
# against the original file.
            'fit_expectations', 'stddev', 'fit_diff_significance',
            'current_price'
        ]),
        ignore_index=True)
    else:
        # decide what information to update! certainly the current price
        pass
    return out_df


if __name__ == "__main__":
    # execute only if run as a script
    # Collect APIs
    api = ALPACA_REST()
    ts = ALPHA_TIMESERIES()
    sqlcursor = SQL_CURSOR()
    # collect date and time
    filter_shift_days = 0
    today = datetime.datetime.now(tz=est)  #+ datetime.timedelta(minutes=5)
    # Day-stamped input/output CSVs (day_month_year ordering).
    outFileName = 'News/signif_%s_%s_%s.csv' % (today.day, today.month,
                                                today.year)
    inFileName = 'News/table_%s_%s_%s.csv' % (
        today.day, today.month, today.year)
    # HACK!!!
    df = []
    try:
        df = pd.read_csv(inFileName)
    except (FileNotFoundError) as e:
        print("Testing multiple exceptions. {}".format(e.args[-1]))
def main(args):
    """Run the live bullish-signal trading loop.

    Wires together the Alpaca SIP data stream, a watchdog file observer on
    the Instructions directory, one `BullishAlgo` per requested symbol, and
    a periodic health-check coroutine; restarts the asyncio loop on
    transient connection errors.

    Parameters
    ----------
    args : argparse-style namespace
        Must provide `symbols`, `lot`, `limit`, and `target`.
    """
    stream = ALPACA_STREAM(data_feed='sip')
    api = ALPACA_REST()
    fleet = {}

    # Move old signals so that we do not consider them
    MoveOldSignals(api)

    # checking for trades to execute!
    event_handler = MyHandler(fleet, api, stream)
    observer = Observer(timeout=1)
    observer.schedule(
        event_handler,
        path='/Users/schae/testarea/finances/FinanceMonitor/Instructions/',
        recursive=True)
    observer.start()

    symbols = args.symbols
    for symbol in symbols:
        if args.lot > 0:
            algo = BullishAlgo(api, symbol, lot=args.lot, limit=args.limit,
                               target=args.target, df=[])
            fleet[symbol] = algo

    # Trigger the loading of the trades
    event_handler.on_modified(True)

    async def on_bars(data):
        # Forward each bar to the algo managing that symbol, if any.
        if data.symbol in fleet:
            fleet[data.symbol].on_bar(data)

    for symbol in symbols:
        print(symbol)
        sys.stdout.flush()
        #stream.subscribe_trades(on_bars, symbol)
        stream.subscribe_bars(on_bars, symbol)

    async def on_trade_updates(data):
        # Route order-state changes back to the owning algo.
        logger.info(f'trade_updates {data}')
        symbol = data.order['symbol']
        if symbol in fleet:
            fleet[symbol].on_order_update(data.event, data.order)

    stream.subscribe_trade_updates(on_trade_updates)

    async def periodic():
        # Every 30s: exit when the market closes, then let each algo
        # check itself against its current position (None if flat).
        while True:
            if not api.get_clock().is_open:
                logger.info('exit as market is not open')
                sys.exit(0)
            await asyncio.sleep(30)
            positions = api.list_positions()
            for symbol, algo in fleet.items():
                pos = [p for p in positions if p.symbol == symbol]
                algo.checkup(pos[0] if len(pos) > 0 else None)

    loop = asyncio.get_event_loop()
    # Restart the stream + periodic tasks on transient failures.
    # (Idiom fixes: `while True` instead of `while 1`; dropped a redundant
    # `pass` that followed the print in the except handler.)
    while True:
        try:
            loop.run_until_complete(
                asyncio.gather(stream._run_forever(), periodic()))
        except (ConnectionResetError, urllib3.exceptions.ProtocolError,
                requests.exceptions.ConnectionError, APIError, ValueError,
                AttributeError, RuntimeError, TimeoutError):
            print('Connection error. will try to restart')
    # NOTE(review): unreachable in practice — the loop above only exits via
    # sys.exit(0) in periodic(); kept for parity with the original.
    loop.close()
    observer.stop()
    observer.join()
plt.axhline(y=h[0], color=h[1], linestyle=h[2]) #xmin=h[1], xmax=h[2], if doSupport: techindicators.supportLevels(my_stock_info) if draw: plt.show() if doPDFs: plt.savefig(outdir + '%s.pdf' % (saveName)) plt.savefig(outdir + '%s.png' % (saveName)) if not draw: plt.close() plt.close() ticker = 'X' j = 0 readType = 'full' sqlcursor = SQL_CURSOR() ts = ALPHA_TIMESERIES() api = ALPACA_REST() stock_info, j = ConfigTable(ticker, sqlcursor, ts, readType, j) trade_days = api.get_bars(ticker, TimeFrame.Minute, "2021-05-03", "2021-05-03", 'raw').df #trade_days = api.get_bars(ticker, TimeFrame.Minute, "2021-04-30", "2021-05-03T12:17:00-04:00", 'raw').df trade_days = trade_days.tz_convert(tz='US/Eastern') spy, j = ConfigTable('SPY', sqlcursor, ts, readType, hoursdelay=2) AddInfo(spy, spy, debug=debug) AddInfo(stock_info, spy, debug=debug) stock_info['sma20d'] = stock_info['adj_close'] - stock_info['sma20'] stock_infoc = stock_info #GetTimeSlot(stock_info,days=70) stock_infoc['daily_return'] = stock_infoc['adj_close'].pct_change()
# --- Streamlit earnings-dashboard setup (script fragment) ---
import alpaca_trade_api
import statsmodels.api as sm1
from statsmodels.sandbox.regression.predstd import wls_prediction_std
from Earnings import GetIncomeStatement, GetPastEarnings, GetStockOverview, GetBalanceSheetQuarterly, GetBalanceSheetAnnual
import base
import plotly.graph_objects as go
import plotly.figure_factory as ff
from plotly.subplots import make_subplots
import matplotlib.dates as mdates
import pytz
import datetime

est = pytz.timezone('US/Eastern')
# NOTE(review): ALPACA_REST, ALPHA_TIMESERIES, SQL_CURSOR, os, matplotlib,
# sns, RendererAgg, and st are brought into scope elsewhere in this file.
api = ALPACA_REST()
ts = ALPHA_TIMESERIES()
STOCK_DB_PATH = os.getenv('STOCK_DB_PATH')
sqlcursor = SQL_CURSOR('%s/stocksAV.db' % STOCK_DB_PATH)
matplotlib.use("agg")
sns.set_style('darkgrid')
_lock = RendererAgg.lock


def clear_form():
    # Reset the Streamlit ticker selector back to its placeholder entry.
    st.session_state["tickerKey"] = "Select"


# NOTE(review): definition truncated at the chunk boundary — the rest of the
# signature and body are not visible here.
def FitWithBand(my_index,