def stat_update():
    """Task to get latest time and market status. Run this every 5 seconds."""
    d = timezone.localtime()  # NY time in Python format
    if MktInfo.objects.first() is not None:  # make sure there exists an object in table
        if ((d.minute == 0) or (d.minute == 30)) and d.second < 15:  # at possible mkt status boundary?
            # We have limited quota with polygon, so we want to minimize how frequently we check!
            key = os.getenv("APCA_API_KEY_ID")
            # get status from polygon
            with RESTClient(key) as client:
                resp = client.reference_market_status()
            MktInfo.objects.all().delete()  # get rid of previous db table entry
            m = MktInfo(mktStatus=resp.market, lastCheckTimeNY=d)
            m.save()  # and create new db entry
            return
        else:
            # We are not at a time when market status would change. Just update time.
            m = MktInfo.objects.first()
            m.lastCheckTimeNY = d
            m.save()
            return
    else:
        # First time here! No object yet in database, so create it.
        key = os.getenv("APCA_API_KEY_ID")
        # get status from polygon
        with RESTClient(key) as client:
            resp = client.reference_market_status()
        m = MktInfo(mktStatus=resp.market, lastCheckTimeNY=d)
        m.save()  # and create new db entry - current time and status
        return
def csvframer(ticker: str, folder: str) -> None:
    with RESTClient(POLYGON_API_KEY) as client:
        response = client.stocks_equities_aggregates(ticker, 1, "DAY",
                                                     "1111-11-11", "2019-12-12")
    data = response.results
    df = pd.DataFrame(data)
    start = (datetime.today() - timedelta(days=2)) - timedelta(days=len(data))
    starter = start.strftime('%Y-%m-%d')
    df.index = pd.to_datetime(df.index, origin=pd.Timestamp(starter), unit='d')
    df.rename(columns={
        "o": "Open",
        "h": "High",
        "l": "Low",
        "c": "Close",
        "v": "Volume"
    }, inplace=True)
    df.index.name = 'Date'
    del df['vw']
    del df['t']
    del df['n']
    # duplicate the Close column so the CSV follows an Open/High/Low/Close/Adj Close/Volume layout
    df = df[['Open', 'High', 'Low', 'Close', 'Close', 'Volume']]
    df.to_csv(folder + ticker + ".csv")
def refresh_prices():
    """Periodic task to update stock prices in database in background."""
    STOCKS_PER_PASS = 5  # number of tickers to handle each time through here
    key = os.getenv("APCA_API_KEY_ID")
    with RESTClient(key) as client:
        assetsall = Asset.objects.order_by('-lastLook')[:STOCKS_PER_PASS]
        for a in assetsall:
            astr = a.assetSymbol.upper()
            if astr != "CASH":
                resp = client.stocks_equities_previous_close(astr)
                prevclose = resp.results[0]['c']
                resp = client.stocks_equities_last_trade_for_a_symbol(astr)
                lastprice = resp.last.price
                print(
                    f"{astr}, status={resp.status} prevclose={prevclose}, lasttrade={lastprice}"
                )
                if resp.status == 'success':
                    a.lastPrice = lastprice
                    a.openingPrice = prevclose
                    a.lastLook = timezone.localtime()
                    a.save()
                else:
                    print(f"unable to update {astr}")
    print(f"refresh_prices processed {len(assetsall)} price updates")
    return
def main(from_, to, ticker, timespan):
    key = 'Wkop2OVwv6zvM63pGEJS0muaRPpUODm8'
    # RESTClient can be used as a context manager to facilitate closing the underlying http session
    # https://requests.readthedocs.io/en/master/user/advanced/#session-objects
    with RESTClient(key) as client:
        from_ = "2019-01-31"
        to = "2019-02-01"
        ticker = "AAPL"
        timespan = "day"
        resp = client.stocks_equities_aggregates(ticker, 1, timespan, from_, to,
                                                 unadjusted=False)
        print(f"Day aggregates for {resp.ticker} between {from_} and {to}.")
        for result in resp.results:
            dt = ts_to_datetime(result["t"])
            print(
                f"{dt}\n\tO: {result['o']}\n\tH: {result['h']}\n\tL: {result['l']}\n\tC: {result['c']} "
            )
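# Hedged sketch (assumption, not part of the original snippets): ts_to_datetime() is
# called in the aggregate examples above and below but is not defined here. Assuming
# the "t" field of each Polygon aggregate bar is an epoch timestamp in milliseconds,
# a minimal helper could look like this:
from datetime import datetime

def ts_to_datetime(ts) -> str:
    # convert milliseconds since the epoch to a readable timestamp string
    return datetime.fromtimestamp(ts / 1000.0).strftime('%Y-%m-%d %H:%M')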
def updates():
    with RESTClient(apiKey) as client:
        return7days = (datetime.now() - timedelta(days=1)).date()
        # return7days = (datetime.now() - timedelta(days=20)).date()
        response = client.stocks_equities_grouped_daily('us', 'stocks',
                                                        return7days,
                                                        unadjusted=True)
        api_results = response.results
    quotes = list()
    for result in api_results:
        quotes.append(
            Quote(
                id=uuid.uuid4(),
                ticker=result.get('T'),
                volume=result.get('v'),
                volume_weighted_avg_price=result.get('vw'),
                open=result.get('o'),
                adj_close=result.get('c'),
                close=result.get('c'),
                high=result.get('h'),
                low=result.get('l'),
                date_time=ts_to_date(result.get('t')),
            ))
    save_quotes(quotes)
    return render_template("updates.html", source=quotes)
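# Hedged sketch (assumption, not from the original snippet): ts_to_date() used in
# updates() is not defined here. Assuming the grouped-daily "t" field is an epoch
# timestamp in milliseconds, a minimal helper might be:
from datetime import datetime

def ts_to_date(ts):
    # convert milliseconds since the epoch to a date object
    return datetime.fromtimestamp(ts / 1000.0).date()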
def trade(self):
    """
    This function, when called, begins the cycle of trading. Note that the data the
    system has access to will be beginDate - lookback, but the first trade will be
    initiated as if it were done during beginDate.
    The eventual goal is to implement some controls over this in tkinter.
    :return:
    """
    client = RESTClient(self.api_key)
    tickers = self.portfolio.tickers
    self.__correct_begin_date()
    for stock in tickers:
        response = client.stocks_equities_aggregates(
            stock, 1, self.bar_distance,
            self.begin_date - datetime.timedelta(days=1),
            self.end_date + datetime.timedelta(days=1))
        if response.results is None:  # make sure that data was actually retrieved
            raise Exception("Unable to retrieve market data")
        self.asset[stock] = response.results
    while self.date_offset + self.look_back.days < len(self.asset[tickers[0]]) - 1:
        # TODO Is every stock going to have the exact same number of days?
        truncated_data = dict()
        for stock in tickers:
            # create the set of data that only includes the current lookback period
            truncated_data[stock] = self.asset[stock][
                self.date_offset:self.look_back.days + self.date_offset]
        self.trading_func(truncated_data)
        self.date_offset += 1
    self.__calc_pl()
def fetch_historical_data(ticker_list):
    today = date.today()
    df_eod = pd.DataFrame()
    ticker_batches = chunks(ticker_list, 5)
    print('fetching data')
    for batch in ticker_batches:
        with RESTClient(key) as client:
            for ticker in batch:
                resp = client.stocks_equities_aggregates(
                    ticker, 1, timespan="day", from_="2019-01-01",
                    to=today.strftime("%Y-%m-%d"))
                df_stock_agg = pd.DataFrame(resp.results)
                # set datatype and use time column as index
                # df_stock_agg = df_stock.astype({'t': 'datetime64[ns]'}).set_index('t')
                df_eod[ticker] = df_stock_agg['c'].values
                if df_eod.index.name != 'time':
                    df_eod['time'] = pd.to_datetime(df_stock_agg['t'], unit='ms')
                    df_eod['time'] = df_eod['time'].dt.normalize()
                    df_eod = df_eod.set_index('time')
    df_eod.to_csv('portfolio_eod_data.csv')
    return df_eod[ticker_list]
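# Hedged sketch (assumption, not from the original snippet): chunks() used in
# fetch_historical_data() is not defined here. A minimal batching helper that splits
# a list into fixed-size groups could look like this:
def chunks(lst, n):
    # yield successive n-sized slices of lst
    for i in range(0, len(lst), n):
        yield lst[i:i + n]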
def snapshot_all_tickers(request):
    if request.method == 'GET':
        with RESTClient(auth_key='u8arVdihlX_6p_pRuvRUwa94YmI4Zrny') as client:
            rep = client.stocks_equities_snapshot_all_tickers()
            tickers = rep.tickers
            trades = [{t['ticker']: t['date']} for t in tickers]
            return JsonResponse({'tickers': trades}, safe=False)
def main():
    key = "your api key"
    client = RESTClient(key)
    resp = client.stocks_equities_daily_open_close("AAPL", "2018-03-02")
    print(
        f"On: {resp.from_} Apple opened at {resp.open} and closed at {resp.close}"
    )
def get_minute_range_data(self, ticker_name: str, from_date: str,
                          to_date: str) -> pd.DataFrame:
    """
    Return a (potentially large) pandas dataframe of minute data for a single stock/equity.

    :param ticker_name: A valid ticker name. This is case sensitive. When in doubt,
        double check the spelling using client.reference_tickers(). Calling this method
        once over a time range of one month takes about 1.7 seconds on my computer.
    :param from_date: a starting date in the form YYYY-MM-DD
    :param to_date: an ending date in the form YYYY-MM-DD
    :return: pandas dataframe
    """
    print(f"Getting the data for {ticker_name} from {from_date} to {to_date}")
    start_date = datetime.strptime(from_date, "%Y-%m-%d")
    end_date = datetime.strptime(to_date, "%Y-%m-%d")
    print(f"Estimated time: {round((end_date - start_date).days * 1.7 / 30, 2)} seconds."
          f"\nInternet speeds may vary greatly")
    repeat = 1
    # More than 5 days of minute data can exceed 5000 data points, and Polygon won't
    # send more than that, so break the range into bite-sized chunks and append them
    # together locally.
    days_change = 5
    if (end_date - start_date).days > days_change:
        repeat = (end_date - start_date).days // days_change
        mid_date = start_date + timedelta(days=days_change)
    else:
        mid_date = end_date
    start_time = time.time()
    df = None
    with RESTClient(self.__alpaca_key) as client:
        for i in range(repeat):
            resp = client.stocks_equities_aggregates(ticker_name, 1, "minute",
                                                     start_date.strftime("%Y-%m-%d"),
                                                     mid_date.strftime("%Y-%m-%d"))
            # resp.results is a list of dictionaries, each representing one bar.
            # Change 't' from milliseconds since 1970 to an ISO-format datetime for clarity.
            for result in resp.results:
                result['t'] = datetime.fromtimestamp(result["t"] / 1000).isoformat()
            temp_df = pd.DataFrame.from_dict(resp.results)
            if df is None:
                df = temp_df
            else:
                df = pd.concat([df, temp_df])
            # shift the range for the next request to Polygon
            start_date = mid_date + timedelta(days=1)  # avoid getting the same data
            mid_date = start_date + timedelta(days=days_change)
            # edge cases
            if mid_date > end_date:
                mid_date = end_date
            if start_date > end_date:
                start_date = end_date
    print(f"time to complete is {time.time() - start_time} s")
    df.reset_index(drop=True, inplace=True)
    return df
def main():
    key = "w8NGBOgXjp393pMbIBhHO6sOxN_N1x8E"
    # RESTClient can be used as a context manager to facilitate closing the underlying http session
    # https://requests.readthedocs.io/en/master/user/advanced/#session-objects
    with RESTClient(key) as client:
        resp = client.stocks_equities_daily_open_close("FB", "2020-11-17")
        print(
            f"On: {resp.from_} FB opened at {resp.open} and closed at {resp.close}"
        )
def main():
    key = "your api key"
    # RESTClient can be used as a context manager to facilitate closing the underlying http session
    # https://requests.readthedocs.io/en/master/user/advanced/#session-objects
    with RESTClient(key) as client:
        resp = client.stocks_equities_daily_open_close("AAPL", "2018-03-02")
        print(
            f"On: {resp.from_} Apple opened at {resp.open} and closed at {resp.close}"
        )
def main():
    key = 'PjeqU9zauMH9o49WYWurfZslqfY8HpF7'
    # API CALL
    with RESTClient(key) as client:
        '''
        This block is for reference purposes only
        # Note that Q results are off by 1 fiscal year, bug currently being worked on
        resp = client.stocks_equities_daily_open_close("AAPL", "2018-03-02")
        print(f"on: {resp.from_} Apple opened at {resp.open} and closed at {resp.close}")

        resp = client.reference_stock_financials("MSFT", limit=1, type='Q')
        print(f"MSFT market cap is {resp.results[0].get('marketCapitalization')} as reported on {resp.results[0].get('reportPeriod')}.")

        custom_limit = 100
        resp = client.reference_stock_financials("MSFT", limit=100, type='Q')
        for i in range(custom_limit):
            print('*' * 50)
            print(f"MSFT market cap is {resp.results[i].get('marketCapitalization')} as reported on {resp.results[i].get('reportPeriod')}.")
            print(f"MSFT debt to equity ratio is {resp.results[i].get('debtToEquityRatio')} as reported on {resp.results[i].get('reportPeriod')}.")
            print(f"MSFT dividend yield is {resp.results[i].get('dividendYield')} as reported on {resp.results[i].get('reportPeriod')}.")
            print(f"MSFT gross profit is {resp.results[i].get('grossProfit')} as reported on {resp.results[i].get('reportPeriod')}.")
            print(f"MSFT net income is {resp.results[i].get('netIncome')} as reported on {resp.results[i].get('reportPeriod')}.")
            print(f"MSFT revenues in USD is {resp.results[i].get('revenuesUSD')} as reported on {resp.results[i].get('reportPeriod')}.")
            print(f"MSFT operating income is {resp.results[i].get('operatingIncome')} as reported on {resp.results[i].get('reportPeriod')}.")
            print('*' * 50)

        print("Testing completed for stock financials. Beginning testing for databasing.")
        print("=" * 50)
        '''

        # Get a list of all tickers
        list_of_all_tickers = gt.get_tickers(NYSE=True, NASDAQ=True, AMEX=False)

        # Set flags to keep track of db initializations
        DB_EXISTENCE_FLAGS = [False, False, False]

        # Initialize the root db
        if os.path.isfile("Root_Database.csv"):
            print("Root DB found, aborting init operation.")
        else:
            if init_root_db(list_of_all_tickers):
                DB_EXISTENCE_FLAGS[0] = True
                print("Root DB has been successfully initialized.")
            else:
                print("Root DB initialization has failed.")

        # Initialize the info db
        if os.path.isfile("Info_Database1.csv"):
            print("Info DB found, aborting init operation.")
        else:
            if init_info_db(key, list_of_all_tickers):
                DB_EXISTENCE_FLAGS[1] = True
                print("Info DB has been successfully initialized.")
            else:
                print("Info DB initialization has failed.")
def polygon_open_and_close_aggregate_api(symbol, from_, end_):
    with RESTClient(auth_key='u8arVdihlX_6p_pRuvRUwa94YmI4Zrny') as client:
        rep = client.stocks_equities_aggregates(ticker=symbol,
                                                multiplier=1,
                                                timespan='week',
                                                from_=from_,
                                                to=end_,
                                                limit=5000)
        if hasattr(rep, 'results'):
            for data in rep.results:
                print(datetime.fromtimestamp(int(data['t']) / 1000.0), data['v'])
def getFinancial(key, ticker, infoSpec):
    # API CALL
    with RESTClient(key) as client:
        resp = client.reference_stock_financials(ticker, limit=1, type='Q')
        try:
            print(
                f"{ticker} {infoSpec} is {resp.results[0].get(infoSpec)} as reported on {resp.results[0].get('reportPeriod')}."
            )
            return resp.results[0].get(infoSpec)
        except:
            print(f"No {infoSpec}.")
            return "DNE"
    return 0
def record(args):
    key = args.key
    tickers = pd.read_csv(args.tickers)['Symbol'].to_list()
    if args.end == '':
        to = pd.to_datetime(pd.Timestamp.today())
    else:
        to = pd.to_datetime(args.end)
    if args.start == '':
        from_ = to - pd.Timedelta('730d')
    else:
        from_ = pd.to_datetime(args.start)
    for ticker in tickers:
        print(f"Working on {ticker}")
        daterange = pd.date_range(from_, to, freq='3M').strftime('%Y-%m-%d')
        daterange = daterange.append(pd.Index([to.strftime('%Y-%m-%d')]))
        ticker_path = Path(ticker).resolve()
        ticker_path.mkdir(exist_ok=True)
        list_for_df = []
        for i in range(1, len(daterange)):
            start = daterange[i - 1]
            end = daterange[i]
            temp_file = Path(ticker_path / f'{ticker}_1m_ohlcv_{start}_{end}.pkl')
            if temp_file.is_file():
                print(f'Skipping: {ticker}_1m_ohlcv_{start}_{end}.pkl')
                continue
            with RESTClient(key) as client:
                if args.free:
                    # Sleep 15 seconds to stay within ~5 requests per minute for a free Polygon account
                    sleep(15)
                resp = client.stocks_equities_aggregates(ticker, 1, "minute",
                                                         start, end,
                                                         unadjusted=False,
                                                         limit=50000)
            try:
                list_for_df.extend(resp.results)
                df_temp = pd.DataFrame(resp.results)
            except AttributeError:
                print(f'Can\'t process: {ticker}_1m_ohlcv_{start}_{end}.pkl')
                continue
            assert len(df_temp) <= 50000
            print(f'Saving: {ticker}_1m_ohlcv_{start}_{end}.pkl')
            df_temp.to_pickle(temp_file)
def fetch_quotes(self, symbol):
    closes = {}
    with RESTClient(self.key) as client:
        resp = client.stocks_equities_aggregates(symbol, 1, "day",
                                                 "2017-01-01", "2020-12-01",
                                                 unadjusted=True)
        for result in resp.results:
            day = arrow.get(result["t"]).format("YYYY-MM-DD")
            closes[day] = result["c"]
    self.quotes[symbol] = closes
def testPolygon(ticker):
    key = os.environ['POLYGON_KEY']
    t = ticker
    if ticker in ['BRK-B', 'BF-B']:
        t = ticker.replace('-', '.')
    with RESTClient(key) as client:
        response = client.reference_stock_financials(t, limit=1, type="Y")
    attribute = ['ticker', 'revenuesUSD', 'marketCapitalization', 'grossProfit',
                 'netCashFlowFromOperations', 'EBITDAMargin', 'debtToEquityRatio']
    res = dict.fromkeys(attribute)
    for i in attribute:
        if i not in ['ticker', 'EBITDAMargin', 'debtToEquityRatio']:
            res[i] = millify(response.results[0][i])
        else:
            res[i] = response.results[0][i]
    return res
def polygon_open_and_close_api(symbol, from_, end_):
    records = JobRecord.objects.filter(name="open_and_close", type='daily')
    dates = set([
        t.strftime('%Y-%m-%d') for t in pd.bdate_range(start=from_, end=end_)
    ])
    if len(records) > 0:
        record_date = set([record.date for record in records])
        needed_date = dates.difference(record_date)
    else:
        needed_date = dates
    with RESTClient(auth_key='u8arVdihlX_6p_pRuvRUwa94YmI4Zrny') as client:
        for date in needed_date:
            try:
                rep = client.stocks_equities_daily_open_close(symbol=symbol, date=date)
                if hasattr(rep, 'symbol'):
                    openAndClose = OpenClose(symbol=symbol,
                                             date=date,
                                             open=rep.open,
                                             high=rep.high,
                                             low=rep.low,
                                             close=rep.close,
                                             volume=rep.volume)
                    print(f"Symbol:{symbol} at date {date}")
                    openAndClose.save()
                    try:
                        job = JobRecord(
                            date=date,
                            name="open_and_close",
                            type="daily",
                        )
                        job.save()
                        print(f"Job_Record: Job record saved successfully on {date}")
                    except:
                        print(f"Job_Record: Job record save failed on {date}")
            except:
                print(f"Symbol:{symbol} has no trading data on {date}")
def getDailyOpenClose(key, ticker, status):
    # Note that there is a delay between recordings of daily close, i.e. find the close
    # from the previous day, not the day of.
    # API CALL
    with RESTClient(key) as client:
        if status == 0:
            resp = client.stocks_equities_daily_open_close(
                ticker,
                convertDateTimeToString(datetime.date.today() - timedelta(days=1)))
            try:
                print(f"{ticker} {status} on: {resp.from_} opened at {resp.open}.")
                return resp.close
            except:
                print(f"No {status} for {ticker}")
                return "DNE"
        elif status == 1:
            refLow = []
            resultList = []
            from_ = convertDateTimeToString(datetime.date.today() - timedelta(days=365))
            to = convertDateTimeToString(datetime.date.today() - timedelta(days=1))
            resp = client.stocks_equities_aggregates(ticker, 1, "day", from_, to)
            try:
                for info in resp.results:
                    refLow.append(info.get("l"))
                # resultList{0 = prev day close, 1 = 52-week low}
                prevClose = resp.results[len(resp.results) - 1].get("c")
                yearLow = min(refLow)
                resultList.append(prevClose)
                resultList.append(yearLow)
                print(f"{ticker} status: {status}| on: {to} closed at {str(prevClose)}.")
                print(f"{ticker} status: {status}| 52 week low is {str(yearLow)}.")
                return resultList
            except:
                print(f"No {status} for {ticker}")
                return "DNE"
    return 0
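# Hedged sketch (assumption, not from the original snippet): convertDateTimeToString()
# used in getDailyOpenClose() is not defined here. Assuming Polygon expects dates as
# YYYY-MM-DD strings, a minimal helper could be:
def convertDateTimeToString(d):
    # format a date/datetime object as the YYYY-MM-DD string the Polygon endpoints expect
    return d.strftime("%Y-%m-%d")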
def get_dividends(api_key, tickers, **query_params):
    '''
    Call Polygon API `reference/dividends` for input tickers and display a summary
    of the results.
    '''
    with RESTClient(api_key) as client:
        dividends = {}
        for symbol in tickers:
            symbol = symbol.upper()
            resp = client.reference_stock_dividends(symbol, **query_params)
            dividends[symbol] = {
                'count': resp.count,
                'results': resp.results,
                'last': _get_last_dividend(resp.results),
                'next': _get_next_dividend(resp.results)
            }
    return dividends
def main():
    # Connect to database
    db = mysql.connect(host="localhost",
                       user="******",
                       passwd="Jn2790472015$",
                       database="data")
    cursor = db.cursor()

    # API Key and client connection
    key = "d25I_1rdsjEHIan3ZoLXiGv_3mfcLY0wRkDpv9"
    client = RESTClient(key)

    # Get headers and declare list of unique identifiers found in polygon ticker data
    headers = getHeaders(client)
    potential_codes = ['cik', 'figiuid', 'scfigi', 'cfigi', 'figi']

    # Declare page - set to 1 to start loop on first page (50 items max per page as per Polygon documentation)
    page = 1

    # Continue to generate API requests with increasing page number while the length of the API response remains non-zero
    while page == 1 or len(resp.tickers) > 0:
        resp = client.reference_tickers(page=page)
        page += 1
        # For each page of results, clean the data and serialize to a list for the insert statement
        for result in resp.tickers:
            data_line = []
            keys = cleanTickerKeys(result)
            # Line up JSON values with serialized headers
            for header in headers:
                if header in keys:
                    if header in potential_codes:
                        data_line.append(result['codes'][header])
                    else:
                        data_line.append(result[header])
                else:
                    data_line.append('NULL')
            # Attempt to run MySQL insert statement; if it fails, skip and continue
            try:
                cursor.execute(tickerInsertStatement(headers, data_line))
                db.commit()
                print(f'{data_line[1]} ({data_line[0]}): Successfully inserted')
            except:
                print('SQL Error')
                pass
def _prefetch_tickers(self, symbol):
    closes = []
    print()
    with RESTClient(self.key) as client:
        cursor = arrow.get("2019-01-01")
        to = arrow.get("2020-12-01")
        while cursor < to:
            start, end = cursor.format('YYYY-MM-DD'), to.format('YYYY-MM-DD')
            resp = client.stocks_equities_aggregates(symbol, 1, "minute",
                                                     start, end,
                                                     unadjusted=False)
            for result in resp.results:
                closes.append((result["t"], result['c']))
                print(f"\rfetching {symbol}: {self.ts_to_datetime(result['t'])}", end="")
            cursor = arrow.get(result["t"])
    print()
    return sorted(closes)
def populateHistoricalData(key, cursor, from_, to, symbols):
    sql = """INSERT INTO public."TICK_SENT"(symbol,volume,volume_weighted_average_price,open_price,close_price,high_price,low_price,start_time) VALUES ('{}', {}, {}, {}, {}, {}, {}, {});"""
    with RESTClient(key) as client:
        for symbol in symbols:
            resp = client.stocks_equities_aggregates(symbol, 1, "minute",
                                                     from_, to,
                                                     unadjusted=False)
            print(f"Minute aggregates for {resp.ticker} between {from_} and {to}.")
            for result in resp.results:
                print(result)
                insertStatement = sql.format(symbol, result['v'], result['vw'],
                                             result['o'], result['c'],
                                             result['h'], result['l'],
                                             result['t'])
                cursor.execute(insertStatement)
def main():
    key = "pbY1RM6pzUSJr4dxTJ0oo3SpZSw7ZxYq"
    # RESTClient can be used as a context manager to facilitate closing the underlying http session
    # https://requests.readthedocs.io/en/master/user/advanced/#session-objects

    # price to earnings ratio: priceToEarningsRatio
    # earnings before interest, taxes, depreciation and amortization:
    #   earningsBeforeInterestTaxesDepreciationAmortizationUSD
    # stock financials
    # List of functions found here:
    # https://github.com/polygon-io/client-python/blob/master/polygon/rest/client.py
    with RESTClient(key) as client:
        apple = getFinancials(client, "AAPL")
        getClose(client, "AAPL")
        print(apple.EBT)
        amd = getFinancials(client, "AMD")
        print(amd.PTE)
        print(amd.reportPeriod)
def _download_data(self, ticker: str, from_: dt.datetime,
                   to: dt.datetime) -> List[Dict[str, Union[float, str]]]:
    """Download data from an extra source. For now, polygon.io is supported."""
    days = self._batch_limit // 60 // 24
    start_date_ = from_
    end_date_ = min(start_date_ + dt.timedelta(days=days), to)
    data = []
    with RESTClient(self._polygon_api_key) as api_client:
        while start_date_ != to:
            limit = int((end_date_ - start_date_).total_seconds() // 60)
            response = api_client.stocks_equities_aggregates(
                ticker=ticker,
                from_=DateTimeHelper.date_to_string(start_date_.date()),
                to=DateTimeHelper.date_to_string(end_date_.date()),
                multiplier=1,
                timespan='minute',
                limit=limit,
            )
            data.extend([
                {
                    'open_price': candlestick['o'],
                    'close_price': candlestick['c'],
                    'low_price': candlestick['l'],
                    'high_price': candlestick['h'],
                    'volume': candlestick['v'],
                    'weighted_volume': candlestick['vw'],
                    'datetime': DateTimeHelper.datetime_to_string(
                        DateTimeHelper.timestamp_to_datetime_utc(candlestick['t'] // 1000)
                    ),
                }
                for candlestick in response.results
            ])
            start_date_ = end_date_
            end_date_ = min(end_date_ + dt.timedelta(days=days), to)
    return data
def get_daily_data(self, ticker_name: str, start_date: str,
                   end_date: str) -> pd.DataFrame:
    """
    Get the daily open, close, high, low for a given stock.
    From what I see, it will go back until ~2003.
    Not authorized to work with Crypto or Forex.
    """
    if not (verify_date_string(start_date) and verify_date_string(end_date)):
        print("The date string must be in the format of YYYY-MM-DD. Year-Month-Day")
        return None
    with RESTClient(self.__alpaca_key) as client:
        resp = client.stocks_equities_aggregates(ticker_name, 1, "day",
                                                 start_date, end_date)
        for result in resp.results:
            result['t'] = datetime.fromtimestamp(result["t"] / 1000).isoformat()
        return pd.DataFrame.from_dict(resp.results)
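# Hedged sketch (assumption, not from the original snippet): verify_date_string()
# used in get_daily_data() is not defined here. A minimal check that a string parses
# as YYYY-MM-DD might be:
from datetime import datetime

def verify_date_string(date_string: str) -> bool:
    # return True if date_string is a valid YYYY-MM-DD date, False otherwise
    try:
        datetime.strptime(date_string, "%Y-%m-%d")
        return True
    except ValueError:
        return False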
def __init__(self):
    # This will get the computer's environment variable and load it in.
    # This is to safely store the API key.
    self.__alpaca_key = os.environ[self.poly_api_key_name]
    # Polygon will only allow 5000 items in a response. If asking for minute data,
    # 5 days may result in about 5000 data points. If asking for a larger time range
    # than this 5000 data point limit, then the beginning date will be what the
    # result is calculated on. If asking for a range of data, it doesn't include the
    # day at the start of the range.
    with RESTClient(self.__alpaca_key) as client:
        try:
            resp = client.reference_tickers(market="stocks", active=True)
            if len(resp.results) == 0:
                print("No result matching the given parameters.")
        except exceptions.HTTPError as e:
            if "401" in e.args[0]:
                print("Unauthorized Key -> ", str(e))
            # Raising an error inside __init__ will stop any object creation and/or data leakage.
            raise exceptions.HTTPError
def get_marketstatus(api_key, **query_params):
    '''
    Call Polygon API `marketstatus` and print the resulting data onscreen.
    '''
    with RESTClient(api_key) as client:
        resp = client.reference_market_status(**query_params)
    nyse = resp.exchanges['nyse']
    nasdaq = resp.exchanges['nasdaq']
    otc = resp.exchanges['otc']
    fx = resp.currencies['fx']
    crypto = resp.currencies['crypto']
    template = (f'As of {resp.serverTime}\n'
                f'  Global Crypto:\t{crypto}\n'
                f'  Global FX:\t\t{fx}\n'
                f'  US Stocks:\t\t{resp.market}\n'
                f'\tNYSE:\t\t{nyse}\n'
                f'\tNASDAQ:\t\t{nasdaq}\n'
                f'\tOTC:\t\t{otc}\n')
    print(template)
    return resp
def main():
    key = "your api key"
    # RESTClient can be used as a context manager to facilitate closing the underlying http session
    # https://requests.readthedocs.io/en/master/user/advanced/#session-objects
    with RESTClient(key) as client:
        from_ = "2019-01-01"
        to = "2019-02-01"
        resp = client.stocks_equities_aggregates("AAPL", 1, "minute", from_, to,
                                                 unadjusted=False)
        print(f"Minute aggregates for {resp.ticker} between {from_} and {to}.")
        for result in resp.results:
            dt = ts_to_datetime(result["t"])
            print(
                f"{dt}\n\tO: {result['o']}\n\tH: {result['h']}\n\tL: {result['l']}\n\tC: {result['c']} "
            )