if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    try:
        with open("Bot/config.yaml", "r") as yamlfile:
            data = yaml.load(yamlfile, Loader=yaml.FullLoader)
            key = data['ALPACA_API_KEY']
            secret_key = data['ALPACA_SECRET_KEY']
            base_url = data['PAPER_URL']
            stream_url = data['STREAM_URL']
            data_url = data['DATA_URL']

        # REST client for account/asset queries; Stream for live SIP market data.
        api = REST(key, secret_key, base_url)
        conn = Stream(key,
                      secret_key,
                      base_url=data_url,
                      data_stream_url=stream_url,
                      data_feed='sip',
                      raw_data=True,
                      kafka_enabled=False)

        # uvloop swaps in a faster event loop implementation for asyncio.
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
        loop = asyncio.get_event_loop()
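# A minimal sketch (not part of the original excerpt) of how the Stream above is
# typically driven once the event loop is in place: an async handler is
# registered for one or more symbols and conn.run() blocks on the websocket.
# The handler name and the 'AAPL' symbol are illustrative only.
async def on_trade(trade):
    # With raw_data=True the handler receives the raw message payload.
    print(trade)

conn.subscribe_trades(on_trade, 'AAPL')
conn.run()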
# Tail of alpaca_thread_run(), which builds the Stream and blocks in conn.run();
# it is started as a daemon thread in the main block below.
        kafka_enabled=False)
    conn.run()


if __name__ == '__main__':
    conn = None
    logging.basicConfig(level=logging.INFO)
    try:
        with open("Bot/config.yaml", "r") as yamlfile:
            data = yaml.load(yamlfile, Loader=yaml.FullLoader)
            key = data['ALPACA_API_KEY']
            secret_key = data['ALPACA_SECRET_KEY']
            base_url = data['PAPER_URL']

        api = REST(key, secret_key, base_url)

        # Run the market-data stream on a background daemon thread so the main
        # thread can keep reading symbols from stdin.
        conn_thread = threading.Thread(target=alpaca_thread_run)
        conn_thread.daemon = True
        conn_thread.start()

        symbol_brain_dic = {}
        while True:
            symbol = input()
            if symbol != 'STOP' and symbol != 'STOP_ALL':
                req_symbols = symbol.split(',')
                print(req_symbols)
                for symbol in req_symbols:
                    data = api.get_asset(symbol)
                    # print(data)
                    # Only track symbols that can actually be shorted.
                    if data.shortable and data.easy_to_borrow:
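# A small helper sketch (not from the original code): the same
# shortable/easy_to_borrow filter used above, factored out so it can be reused
# or tested on its own. 'api' is the REST client created in the main block.
def is_shortable(api, symbol):
    asset = api.get_asset(symbol)
    return asset.shortable and asset.easy_to_borrow

# Example: filter a comma-separated request the same way the input loop does.
# tradeable = [s for s in "AAPL,MSFT,TSLA".split(',') if is_shortable(api, s)]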
from datetime import datetime, timedelta

import psycopg2
import psycopg2.extras
import yaml
from alpaca_trade_api.rest import REST
from dateutil.parser import parse


class HistoricalDataFetcher(object):
    """Fetches historical market data from Alpaca and persists it into
    TimescaleDB hypertables (one trade/quote table per symbol)."""

    def __init__(self):
        with open("Bot/config.yaml", "r") as yamlfile:
            data = yaml.load(yamlfile, Loader=yaml.FullLoader)
            # self.key = data['LIVE_API_KEY']
            # self.secret_key = data['LIVE_SECRET_KEY']
            self.key = data['ALPACA_API_KEY']
            self.secret_key = data['ALPACA_SECRET_KEY']
            self.base_url = data['PAPER_URL']
            self.DB_HOST = data['DB_HOST']
            self.DB_USER = data['DB_USER']
            self.DB_PASS = data['DB_PASS']
            self.DB_NAME = data['DB_NAME']

        # The connection to the Alpaca API
        self.api = REST(self.key, self.secret_key, self.base_url)

        # The connection to the TimescaleDB/PostgreSQL database
        self.connection = psycopg2.connect(host=self.DB_HOST,
                                           database=self.DB_NAME,
                                           user=self.DB_USER,
                                           password=self.DB_PASS)
        self.connection.autocommit = True
        self.cursor = self.connection.cursor(
            cursor_factory=psycopg2.extras.DictCursor)

    def create_trade_table(self, symbol):
        self.trade_table = str(symbol + "_T").lower()
        self.cursor.execute(
            "select exists(select * from information_schema.tables "
            "where table_name=%s)", (self.trade_table, ))
        if not bool(self.cursor.fetchone()[0]):
            # Create table statement
            # add trading_session (ENUM SESSION), trading_day (DATE)
            sqlCreateTable = ("create table " + self.trade_table +
                              " (time TIMESTAMP WITH TIME ZONE NOT NULL, "
                              "price NUMERIC NOT NULL, "
                              "size INTEGER NOT NULL, "
                              "trade_id BIGINT NOT NULL, "
                              "trading_day DATE NOT NULL, "
                              "trading_session SESSION NOT NULL, "
                              "UNIQUE(time, trade_id));")
            self.cursor.execute(sqlCreateTable)
            sqlCreateHypertable = ("SELECT create_hypertable('" +
                                   self.trade_table + "', 'time', "
                                   "chunk_time_interval => INTERVAL '1 day');")
            self.cursor.execute(sqlCreateHypertable)

    def create_quote_table(self, symbol):
        self.quote_table = symbol + "_Q"
        # Create table statement
        sqlCreateTable = ("create table " + self.quote_table +
                          " (time TIMESTAMP WITH TIME ZONE NOT NULL, "
                          "price DOUBLE PRECISION NOT NULL, "
                          "size DOUBLE PRECISION NOT NULL, "
                          "trade_id DOUBLE PRECISION NOT NULL, "
                          "UNIQUE(time, trade_id));")
        # Create a table in PostgreSQL database
        self.cursor.execute(sqlCreateTable)
        self.connection.commit()
        sqlCreateHypertable = ("SELECT create_hypertable('" + self.quote_table +
                               "', 'time', "
                               "chunk_time_interval => INTERVAL '1 day');")
        self.cursor.execute(sqlCreateHypertable)
        self.connection.commit()
        sqlCreateIndex = ("CREATE INDEX ON " + self.quote_table +
                          "(time, trade_id) "
                          "WITH (timescaledb.transaction_per_chunk);")
        self.cursor.execute(sqlCreateIndex)
        self.connection.commit()

    def persist_trades(self, symbol, start, end):
        self.create_trade_table(symbol)
        trades = self.api.get_trades_iter(symbol, start, end, throttle=3)
        for trade in trades:
            trade_date = parse(trade.t)
            trading_day = trade_date.date()
            trading_time = trade_date.time()
            # Classify each trade into a session bucket (Alpaca timestamps are
            # UTC): 00:00-01:00 counts as the previous day's after-hours,
            # 01:00-14:30 is pre-market ('BH'), after 21:00 is after-hours
            # ('AH'), everything else is the regular session ('D').
            trading_session = 'D'
            if (trading_time <= datetime.strptime('01:00:00', '%H:%M:%S').time()
                    and trading_time >= datetime.strptime(
                        '00:00:00', '%H:%M:%S').time()):
                trading_day = trading_day - timedelta(days=1)
                trading_session = 'AH'
            if (trading_time < datetime.strptime('14:30:00', '%H:%M:%S').time()
                    and trading_time > datetime.strptime(
                        '01:00:00', '%H:%M:%S').time()):
                trading_session = 'BH'
            elif trading_time > datetime.strptime('21:00:00', '%H:%M:%S').time():
                trading_session = 'AH'
            print(symbol + " , " + trade.t + " , " + str(trade.p) + " , " +
                  str(trade.s) + " , " + str(trade.i) + " , " +
                  str(trading_day) + " , " + trading_session)
            self.cursor.execute(
                "INSERT INTO " + self.trade_table +
                " (time, price, size, trade_id, trading_day, trading_session) "
                "VALUES (%s, %s, %s, %s, %s, %s) ON CONFLICT DO NOTHING",
                (trade.t, trade.p, trade.s, trade.i, trading_day,
                 trading_session))

    def destroy(self):
        # Release the database cursor and connection held by this instance.
        self.cursor.close()
        self.connection.close()
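# A usage sketch (not in the original source). create_trade_table expects a
# custom SESSION enum type to already exist in the database; the DO block below
# is one plausible definition derived from the 'BH'/'D'/'AH' values written by
# persist_trades. The symbol and RFC-3339 date range are placeholders.
if __name__ == '__main__':
    fetcher = HistoricalDataFetcher()
    fetcher.cursor.execute(
        "DO $$ BEGIN "
        "CREATE TYPE SESSION AS ENUM ('BH', 'D', 'AH'); "
        "EXCEPTION WHEN duplicate_object THEN NULL; "
        "END $$;")
    fetcher.persist_trades('AAPL', '2021-06-08T08:00:00Z',
                           '2021-06-09T01:00:00Z')
    fetcher.destroy()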