def update_ma(range1, range2, stock_picker):
    """Closing-price chart for *stock_picker* with two moving-average overlays.

    Returns a plotly figure dict containing the raw close line plus
    SMA(range1) and SMA(range2), with a 6M/1Y/YTD range selector.
    """
    df = StooqDailyReader(stock_picker, start="2020-01-01").read()
    df = df.sort_values(by='Date')

    close = df['Close']
    sma_short = close.rolling(window=range1).mean()
    sma_long = close.rolling(window=range2).mean()

    price_trace = go.Scatter(x=df.index, y=close, mode='lines',
                             name=stock_picker)
    sma_trace_short = go.Scatter(x=df.index, y=sma_short, mode='lines',
                                 yaxis='y', name=f'SMA {range1}')
    sma_trace_long = go.Scatter(x=df.index, y=sma_long, mode='lines',
                                yaxis='y', name=f'SMA {range2}')

    chart_layout = go.Layout({
        'title': 'Stock Price With Moving Average',
        "legend": {
            "orientation": "h",
            "xanchor": "left"
        },
        "xaxis": {
            "rangeselector": {
                "buttons": [{
                    "count": 6,
                    "label": "6M",
                    "step": "month",
                    "stepmode": "backward"
                }, {
                    "count": 1,
                    "label": "1Y",
                    "step": "year",
                    "stepmode": "backward"
                }, {
                    "count": 1,
                    "label": "YTD",
                    "step": "year",
                    "stepmode": "todate"
                }]
            }
        }
    })

    # Price first, then both SMA overlays (same order as before).
    return {'data': [price_trace, sma_trace_short, sma_trace_long],
            'layout': chart_layout}
def _update_data_from_web(self, ticker, source) -> None: """ download and merge data into database start date is the last updated date + 1 and end date is today """ # check if source is available for the ticker if self._get_number_of_trials(ticker, source) > 3: print(f'{ticker} from {source} is skipped', flush=True) return suffix, relative_limit, processor = source_map[source] # configure start, end dates today = pd.Timestamp.today() - rdate("1b") start_date = self._get_last_date_from_db(ticker, source) + rdate("1b") if relative_limit is not None and start_date < today - relative_limit: start_date = today - relative_limit end_date = today # check date if start_date > end_date: return # download data and pre-process if needed if source == "stooq": data = StooqDailyReader(ticker + suffix, start_date, end_date).read() if data.shape[0] == 0: self._increment_trial_count(ticker, source) return else: try: data = web.DataReader(ticker + suffix, source, start_date, end_date) except KeyError: self._increment_trial_count(ticker, source) return if processor is not None: processor(data) data.index = data.index.astype( "datetime64[ns]") # for some sources, string can be return sdate, edate = parse_time(data.index.min()), parse_time( data.index.max()) print( f"Downloaded {ticker} from {sdate.date()} to {edate.date()} totally {data.shape[0]:d} bars via {source}", flush=True) data["source"] = source data.to_sql(ticker, con=self.conn, index=True, index_label="date", if_exists="append")
def get_rsi(
    start=datetime(2020, 1, 1),
    end=datetime(2020, 12, 31),
    brand='1305.JP',
    n=14,
):
    """Compute the n-period RSI for *brand* from Stooq daily closes.

    Returns a tuple ``(rsi, close)`` where ``rsi`` is scaled to 0-100 and
    ``close`` is the raw closing-price series.
    """
    reader = StooqDailyReader(brand, start=start, end=end)
    prices = reader.read()  # pandas DataFrame with a 'Close' column
    close = prices["Close"]

    # Split day-over-day changes into gains and (absolute) losses.
    delta = close.diff()
    gains = delta.clip(lower=0)
    losses = delta.clip(upper=0).abs()

    avg_gain = gains.rolling(window=n, center=False).mean()
    avg_loss = losses.rolling(window=n, center=False).mean()

    # RSI = 100 * avg_gain / (avg_gain + avg_loss)
    rsi = avg_gain / (avg_gain + avg_loss)
    return rsi * 100, close
def update_graph(stock_ticker):
    """Box plot of closing prices for *stock_ticker* since 2020-01-01."""
    closes = StooqDailyReader(stock_ticker, start="2020-01-01").read()[["Close"]]
    return {
        "data": px.box(closes, y=closes.columns),
        'layout': {
            'title': {
                'text': 'DISPLAY ME!'
            }
        }
    }
def update_graph(n_clicks, start_date, end_date, stock_ticker):
    """Line chart of closing prices for every selected ticker.

    Dates arrive from the Dash date picker as strings, so they are parsed
    back to datetimes before querying Stooq.
    """
    start = dt.strptime(start_date[:10], '%Y-%m-%d')
    end = dt.strptime(end_date[:10], '%Y-%m-%d')

    # Dedicated session with TLS verification disabled — a workaround for
    # corporate firewalls that re-sign HTTPS traffic.
    session = requests.Session()
    session.verify = False

    # One trace per selected ticker.
    traces = []
    for symbol in stock_ticker:
        prices = StooqDailyReader(symbol, start=start, end=end,
                                  session=session).read()
        traces.append({
            'x': prices.index,
            'y': prices['Close'],
            'name': symbol,
            'hoverinfo': 'text',
            'text': prices['Close']
        })

    # Bold each ticker name for the chart title.
    bold_names = ['<b>' + stock + '</b>' for stock in stock_ticker]
    return {
        'data': traces,
        'layout': {
            'title': ', '.join(bold_names) + ' Closing Prices',
            'hovermode': 'closest',
            'xaxis': {
                'title': 'Time',
                'showline': True
            },
            'yaxis': {
                'title': 'Price (USD)',
                'showline': True
            }
        }
    }
def update_graph(n_clicks, stock_ticker, start_date, end_date):
    """Correlation heatmap of closing prices for the selected WIG tickers.

    Builds one closing-price column per ticker (rows aligned on the shared
    date index), computes the pairwise correlation matrix and renders it as
    a plotly express heatmap.
    """
    start = datetime.strptime(start_date[:10], '%Y-%m-%d')
    end = datetime.strptime(end_date[:10], '%Y-%m-%d')
    # Removed: unused `traces = []` and dead commented-out yfinance call.
    df = pd.DataFrame()
    for tic in stock_ticker:
        # One column per ticker; pandas aligns rows on the date index.
        df[tic] = StooqDailyReader(tic, start=start, end=end).read()["Close"]
    corr = df.corr()
    fig = px.imshow(
        corr,
        color_continuous_scale="Greens",
        title='Korelacje cen zamknięcia wybranych spółek notowanych na WIG')
    return fig
def update_graph(n_clicks, stock_ticker, start_date, end_date):
    """Min-max-normalized closing-price lines for the selected WIG tickers.

    All columns are scaled with a single global minimum and maximum (not
    per column), so the relative price levels between tickers remain
    comparable on one axis.
    """
    start = datetime.strptime(start_date[:10], '%Y-%m-%d')
    end = datetime.strptime(end_date[:10], '%Y-%m-%d')
    # Removed: unused `traces = []` and dead commented-out yfinance call.
    df = pd.DataFrame()
    for tic in stock_ticker:
        # One closing-price column per ticker, aligned on the date index.
        df[tic] = StooqDailyReader(tic, start=start, end=end).read()["Close"]
    # Global extrema across every column (deliberately not per-column).
    df_max = df.max().max()
    df_min = df.min().min()
    normalized_df = (df - df_min) / (df_max - df_min)
    fig = px.line(normalized_df,
                  x=normalized_df.index,
                  y=normalized_df.columns,
                  title='Ceny zamknięcia wybranych spółek notowanych na WIG')
    return fig
def update_graph(stock_ticker):
    """Candlestick chart of *stock_ticker* daily OHLC data since 2020-01-01.

    Returns a plotly figure dict for a dcc.Graph component.
    """
    df = StooqDailyReader(stock_ticker, start="2020-01-01").read()
    # BUG FIX: a dict-figure's 'data' entry must be a list of traces.
    # Previously a whole go.Figure was stuffed into 'data', which is not a
    # valid trace list and clashed with the sibling 'layout' dict.
    candles = go.Candlestick(x=df.index,
                             open=df['Open'],
                             high=df['High'],
                             low=df['Low'],
                             close=df['Close'])
    return {
        "data": [candles],
        'layout': {
            'xaxis': {
                'title': 'Data'
            },
            'yaxis': {
                'title': 'Cena zamknięcia'
            }
        }
    }
def update_return(stock_picker):
    """Cumulative percentage return of *stock_picker* vs the WIG index.

    Both series are downloaded from Stooq starting 2020-01-01, rebased to
    their first observation, and returned as a plotly figure dict with one
    line per series.
    """
    df = StooqDailyReader(stock_picker, start="2020-01-01").read()
    df_wig = StooqDailyReader("WIG.PL", start="2020-01-01").read()
    df = df.sort_values(by='Date')
    df_wig = df_wig.sort_values(by='Date')
    stocks = pd.DataFrame({
        "Date": df.index,
        str(stock_picker): df["Close"],
        "WIG": df_wig["Close"]
    })
    stocks = stocks.set_index('Date')
    # Percent change relative to the first row of each column.
    # BUG FIX: use positional .iloc[0] instead of x[0] — an integer key on a
    # date-indexed Series is a (deprecated, later removed) label lookup in
    # modern pandas and fails with a KeyError.
    stock_return = stocks.apply(lambda x: ((x - x.iloc[0]) / x.iloc[0]) * 100)
    trace2 = go.Scatter(x=stock_return.index,
                        y=stock_return[str(stock_picker)],
                        mode='lines',
                        name=str(stock_picker))
    trace3 = go.Scatter(x=stock_return.index,
                        y=stock_return['WIG'],
                        mode='lines',
                        name="WIG")
    return {
        'data': [trace2, trace3],
        'layout': {
            'xaxis': {
                'title': 'Data'
            },
            'yaxis': {
                'title': 'Cena zamknięcia'
            }
        }
    }
def DataReader(
    name,
    data_source=None,
    start=None,
    end=None,
    retry_count=3,
    pause=0.1,
    session=None,
    api_key=None,
):
    """
    Imports data from a number of online sources.

    Currently supports Google Finance, St. Louis FED (FRED), and Kenneth
    French's data library, among others.

    Parameters
    ----------
    name : str or list of strs
        the name of the dataset. Some data sources (IEX, fred) will accept a
        list of names.
    data_source: {str, None}
        the data source ("iex", "fred", "ff")
    start : {datetime, None}
        left boundary for range (defaults to 1/1/2010)
    end : {datetime, None}
        right boundary for range (defaults to today)
    retry_count : {int, 3}
        Number of times to retry query request.
    pause : {numeric, 0.001}
        Time, in seconds, to pause between consecutive queries of chunks. If
        single value given for symbol, represents the pause between retries.
    session : Session, default None
        requests.sessions.Session instance to be used
    api_key : (str, None)
        Optional parameter to specify an API key for certain data sources.

    Raises
    ------
    NotImplementedError
        If *data_source* is not one of the supported source names.

    Examples
    ----------
    # Data from Google Finance
    aapl = DataReader("AAPL", "iex")

    # Price and volume data from IEX
    tops = DataReader(["GS", "AAPL"], "iex-tops")
    # Top of book executions from IEX
    gs = DataReader("GS", "iex-last")
    # Real-time depth of book data from IEX
    gs = DataReader("GS", "iex-book")

    # Data from FRED
    vix = DataReader("VIXCLS", "fred")

    # Data from Fama/French
    ff = DataReader("F-F_Research_Data_Factors", "famafrench")
    ff = DataReader("F-F_Research_Data_Factors_weekly", "famafrench")
    ff = DataReader("6_Portfolios_2x3", "famafrench")
    ff = DataReader("F-F_ST_Reversal_Factor", "famafrench")
    """
    # Fail fast on unknown sources before touching the network.
    # BUG FIX: "iex-last" appeared twice in this list; duplicate removed.
    expected_source = [
        "yahoo",
        "iex",
        "iex-tops",
        "iex-last",
        "bankofcanada",
        "stooq",
        "iex-book",
        "enigma",
        "fred",
        "famafrench",
        "oecd",
        "eurostat",
        "nasdaq",
        "quandl",
        "moex",
        "robinhood",
        "tiingo",
        "yahoo-actions",
        "yahoo-dividends",
        "av-forex",
        "av-daily",
        "av-daily-adjusted",
        "av-weekly",
        "av-weekly-adjusted",
        "av-monthly",
        "av-monthly-adjusted",
        "av-intraday",
        "econdb",
    ]

    if data_source not in expected_source:
        msg = "data_source=%r is not implemented" % data_source
        raise NotImplementedError(msg)

    # Dispatch to the reader matching the requested source.
    if data_source == "yahoo":
        return YahooDailyReader(
            symbols=name,
            start=start,
            end=end,
            adjust_price=False,
            chunksize=25,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "iex":
        return IEXDailyReader(
            symbols=name,
            start=start,
            end=end,
            chunksize=25,
            api_key=api_key,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "iex-tops":
        return IEXTops(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "iex-last":
        return IEXLasts(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "bankofcanada":
        return BankOfCanadaReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "stooq":
        # BUG FIX: start/end were previously dropped for Stooq, so callers
        # always received the full history regardless of the requested range.
        return StooqDailyReader(
            symbols=name,
            start=start,
            end=end,
            chunksize=25,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "iex-book":
        return IEXDeep(
            symbols=name,
            service="book",
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "enigma":
        return EnigmaReader(dataset_id=name, api_key=api_key).read()
    elif data_source == "fred":
        return FredReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "famafrench":
        return FamaFrenchReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "oecd":
        return OECDReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "eurostat":
        return EurostatReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "nasdaq":
        # Nasdaq only supports the symbol directory download.
        if name != "symbols":
            raise ValueError("Only the string 'symbols' is supported for "
                             "Nasdaq, not %r" % (name, ))
        return get_nasdaq_symbols(retry_count=retry_count, pause=pause)
    elif data_source == "quandl":
        return QuandlReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "moex":
        return MoexReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "robinhood":
        return RobinhoodHistoricalReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "tiingo":
        return TiingoDailyReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "yahoo-actions":
        return YahooActionReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    elif data_source == "yahoo-dividends":
        return YahooDivReader(
            symbols=name,
            start=start,
            end=end,
            adjust_price=False,
            chunksize=25,
            retry_count=retry_count,
            pause=pause,
            session=session,
            interval="d",
        ).read()
    elif data_source == "av-forex":
        return AVForexReader(
            symbols=name,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "av-daily":
        return AVTimeSeriesReader(
            symbols=name,
            function="TIME_SERIES_DAILY",
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "av-daily-adjusted":
        return AVTimeSeriesReader(
            symbols=name,
            function="TIME_SERIES_DAILY_ADJUSTED",
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "av-weekly":
        return AVTimeSeriesReader(
            symbols=name,
            function="TIME_SERIES_WEEKLY",
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "av-weekly-adjusted":
        return AVTimeSeriesReader(
            symbols=name,
            function="TIME_SERIES_WEEKLY_ADJUSTED",
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "av-monthly":
        return AVTimeSeriesReader(
            symbols=name,
            function="TIME_SERIES_MONTHLY",
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "av-monthly-adjusted":
        return AVTimeSeriesReader(
            symbols=name,
            function="TIME_SERIES_MONTHLY_ADJUSTED",
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "av-intraday":
        return AVTimeSeriesReader(
            symbols=name,
            function="TIME_SERIES_INTRADAY",
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
            api_key=api_key,
        ).read()
    elif data_source == "econdb":
        return EcondbReader(
            symbols=name,
            start=start,
            end=end,
            retry_count=retry_count,
            pause=pause,
            session=session,
        ).read()
    else:
        # Defensive: unreachable given the expected_source check above.
        msg = "data_source=%r is not implemented" % data_source
        raise NotImplementedError(msg)
def get_data_stooq(*args, **kwargs):
    """Fetch daily data from Stooq; all arguments forward to StooqDailyReader."""
    reader = StooqDailyReader(*args, **kwargs)
    return reader.read()
def download_data(ticker):
    """Download daily Stooq data for *ticker*, suffixing every column name.

    Each column is renamed to ``<column>_<ticker>`` so frames for several
    tickers can be joined without collisions.
    """
    frame = StooqDailyReader(symbols=ticker).read()
    suffixed = {name: name + '_' + ticker for name in frame.columns}
    return frame.rename(columns=suffixed)
    # NOTE(review): this is the tail of a helper whose `def` line is outside
    # this excerpt — presumably `bollinger(data, window, variance)`, judging
    # by the calls below. Confirm the full signature against the original.
    avg = np.mean(array)
    # Band edges at +/- 2 standard deviations around the mean.
    low = avg - 2*stdev
    high = avg + 2*stdev
    return {
        # 'date': data.index,
        'low': low,
        'mean': avg,
        'high': high,
        # Position of data[0] inside the band: 0 = at `low`, 1 = at `high`.
        # NOTE(review): data[0] is the first element of the series — verify
        # that it is the bar the author intended (latest vs oldest depends on
        # the frame's sort order).
        'pos': (data[0]-low) / (high-low)
    }

# Stooq index symbols: mWIG40 total-return, DAX, and S&P 500.
wig40 = "MWIG40TR.PL"
dax = "^DAX"
sp500 = "^SPX"

# Full available daily closing-price history for each index.
dax_data = StooqDailyReader(dax).read()['Close']
mwig40_data = StooqDailyReader(wig40).read()['Close']
spx_data = StooqDailyReader(sp500).read()['Close']

# variance
# Print each index's 50-bar band summary (third argument is 0 here).
print('dax')
print(bollinger(dax_data, 50, 0))
print('mwig40')
print(bollinger(mwig40_data, 50, 0))
print('sp500')
print(bollinger(spx_data, 50, 0))
def DataReader(name,
               data_source=None,
               start=None,
               end=None,
               retry_count=3,
               pause=0.001,
               session=None,
               access_key=None):
    """
    Imports data from a number of online sources.

    Currently supports Google Finance, St. Louis FED (FRED), and Kenneth
    French's data library, among others.

    Parameters
    ----------
    name : str or list of strs
        the name of the dataset. Some data sources (google, fred) will accept
        a list of names.
    data_source: {str, None}
        the data source ("google", "fred", "ff")
    start : {datetime, None}
        left boundary for range (defaults to 1/1/2010)
    end : {datetime, None}
        right boundary for range (defaults to today)
    retry_count : {int, 3}
        Number of times to retry query request.
    pause : {numeric, 0.001}
        Time, in seconds, to pause between consecutive queries of chunks. If
        single value given for symbol, represents the pause between retries.
    session : Session, default None
        requests.sessions.Session instance to be used
    access_key : (str, None)
        Optional parameter to specify an API key for certain data sources.

    Raises
    ------
    ImmediateDeprecationError
        For the removed Yahoo and EDGAR sources.
    NotImplementedError
        If *data_source* is not a supported source name.

    Examples
    ----------
    # Data from Google Finance
    aapl = DataReader("AAPL", "google")

    # Price and volume data from IEX
    tops = DataReader(["GS", "AAPL"], "iex-tops")
    # Top of book executions from IEX
    gs = DataReader("GS", "iex-last")
    # Real-time depth of book data from IEX
    gs = DataReader("GS", "iex-book")

    # Data from FRED
    vix = DataReader("VIXCLS", "fred")

    # Data from Fama/French
    ff = DataReader("F-F_Research_Data_Factors", "famafrench")
    ff = DataReader("F-F_Research_Data_Factors_weekly", "famafrench")
    ff = DataReader("6_Portfolios_2x3", "famafrench")
    ff = DataReader("F-F_ST_Reversal_Factor", "famafrench")
    """
    # The Yahoo sources are hard-deprecated. CLEANUP: the reader
    # constructions that used to follow these raises were unreachable dead
    # code and have been removed.
    if data_source == "yahoo":
        raise ImmediateDeprecationError(DEP_ERROR_MSG.format('Yahoo Daily'))
    elif data_source == "yahoo-actions":
        raise ImmediateDeprecationError(DEP_ERROR_MSG.format('Yahoo Actions'))
    elif data_source == "yahoo-dividends":
        comp = 'Yahoo Dividends'
        raise ImmediateDeprecationError(DEP_ERROR_MSG.format(comp))
    elif data_source == "google":
        return GoogleDailyReader(symbols=name, start=start, end=end,
                                 chunksize=25,
                                 retry_count=retry_count, pause=pause,
                                 session=session).read()
    elif data_source == "iex":
        return IEXDailyReader(symbols=name, start=start, end=end,
                              chunksize=25,
                              retry_count=retry_count, pause=pause,
                              session=session).read()
    elif data_source == "iex-tops":
        return IEXTops(symbols=name, start=start, end=end,
                       retry_count=retry_count, pause=pause,
                       session=session).read()
    elif data_source == "iex-last":
        return IEXLasts(symbols=name, start=start, end=end,
                        retry_count=retry_count, pause=pause,
                        session=session).read()
    elif data_source == "bankofcanada":
        return BankOfCanadaReader(symbols=name, start=start, end=end,
                                  retry_count=retry_count, pause=pause,
                                  session=session).read()
    elif data_source == "stooq":
        # BUG FIX: start/end were previously dropped for Stooq, so callers
        # always received the full history regardless of the requested range.
        return StooqDailyReader(symbols=name, start=start, end=end,
                                chunksize=25,
                                retry_count=retry_count, pause=pause,
                                session=session).read()
    elif data_source == "iex-book":
        return IEXDeep(symbols=name, service="book", start=start, end=end,
                       retry_count=retry_count, pause=pause,
                       session=session).read()
    elif data_source == "enigma":
        return EnigmaReader(dataset_id=name, api_key=access_key).read()
    elif data_source == "fred":
        return FredReader(symbols=name, start=start, end=end,
                          retry_count=retry_count, pause=pause,
                          session=session).read()
    elif data_source == "famafrench":
        return FamaFrenchReader(symbols=name, start=start, end=end,
                                retry_count=retry_count, pause=pause,
                                session=session).read()
    elif data_source == "oecd":
        return OECDReader(symbols=name, start=start, end=end,
                          retry_count=retry_count, pause=pause,
                          session=session).read()
    elif data_source == "eurostat":
        return EurostatReader(symbols=name, start=start, end=end,
                              retry_count=retry_count, pause=pause,
                              session=session).read()
    elif data_source == "edgar-index":
        raise ImmediateDeprecationError(DEP_ERROR_MSG.format('EDGAR'))
    elif data_source == 'nasdaq':
        # Nasdaq only supports the symbol directory download.
        if name != 'symbols':
            raise ValueError("Only the string 'symbols' is supported for "
                             "Nasdaq, not %r" % (name, ))
        return get_nasdaq_symbols(retry_count=retry_count, pause=pause)
    elif data_source == "quandl":
        return QuandlReader(symbols=name, start=start, end=end,
                            retry_count=retry_count, pause=pause,
                            session=session).read()
    elif data_source == "moex":
        return MoexReader(symbols=name, start=start, end=end,
                          retry_count=retry_count, pause=pause,
                          session=session).read()
    elif data_source == "morningstar":
        return MorningstarDailyReader(symbols=name, start=start, end=end,
                                      retry_count=retry_count, pause=pause,
                                      session=session, interval="d").read()
    elif data_source == 'robinhood':
        return RobinhoodHistoricalReader(symbols=name, start=start, end=end,
                                         retry_count=retry_count, pause=pause,
                                         session=session).read()
    elif data_source == 'tiingo':
        return TiingoDailyReader(symbols=name, start=start, end=end,
                                 retry_count=retry_count, pause=pause,
                                 session=session,
                                 api_key=access_key).read()
    else:
        msg = "data_source=%r is not implemented" % data_source
        raise NotImplementedError(msg)
from pandas_datareader.stooq import StooqDailyReader
import pandas as pd
import numpy as np

# Daily prices for FB from Stooq (full available history).
frame = StooqDailyReader(symbols='FB.US').read()

# Bucket opening prices into the (80, 100], (100, 200] and (200, max] bands.
price_bins = [80, 100, 200, frame['Open'].max()]
frame['Open_group'] = pd.cut(x=frame['Open'], bins=price_bins)

# Rounded sum and mean of the open price within each band.
summary = frame.groupby(['Open_group']).agg({'Open': ['sum', 'mean']})
print(summary.apply(np.round))
import pandas_datareader as web
import fix_yahoo_finance as yf

yf.pdr_override()  # <== that's all it takes :-)

#Stocks
# London-listed bank and commodity tickers to download.
ticker_list = [
    "HSBA.L", "BARC.L", 'LLOY.L', "RBS.L", "BP.L", "rdsa.l", "RIO.L", "AAL.L"
]
# Closing prices for the three-year window; pdr_override() above routes this
# call through fix_yahoo_finance.
data = web.data.get_data_yahoo(ticker_list, start="2009-07-02",
                               end="2012-07-01")['Close']
# Fill per-ticker gaps (e.g. differing exchange holidays) by interpolating
# along the time index.
data = data.interpolate(method='time', axis=0)
print(data)
# Report any NaNs interpolation could not fill (leading/trailing gaps).
print(data.isna().sum())
data.to_csv('stock_prices.csv')

#FX
from pandas_datareader.stooq import StooqDailyReader
import datetime as dt

start_date = dt.date(2009, 7, 1)
end_date = dt.date(2012, 7, 1)
# RUB/GBP daily rates from Stooq. NOTE(review): the start/end strings passed
# here may not be honored by the reader — hence the explicit label slice
# below; confirm against the installed pandas-datareader version.
fx = StooqDailyReader(symbols='rubgbp', start="2009-07-01",
                      end="2012-07-01").read()
# Sort ascending by date so the label-based date slice below works.
fx = fx.sort_index()
fx = fx[start_date:end_date]['Close']
print(fx)
fx.to_csv('rubgbp.csv', header=['Close'])