import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates

# Month/year tick format used for the price charts below.
my_year_month_fmt = mdates.DateFormatter('%m/%y')

data = pd.read_pickle('three_stocks.pkl')

# Calculating the short-window simple moving average
short_rolling = data.rolling(window=20).mean()

# Calculating the long-window simple moving average
long_rolling = data.rolling(window=100).mean()

# Using Pandas to calculate a 20-days span EMA. adjust=False specifies that we
# are interested in the recursive calculation mode.
ema_short = data.ewm(span=20, adjust=False).mean()

start_date = '2015-01-01'
end_date = '2016-12-31'

fig = plt.figure(figsize=(15, 9))
ax = fig.add_subplot(1, 1, 1)
# .ix was deprecated in pandas 0.20 and removed in 1.0; .loc is the
# label-based replacement and behaves identically for these date-string slices.
ax.plot(data.loc[start_date:end_date, :].index,
        data.loc[start_date:end_date, 'MSFT'], label='Price')
# Build a 5-row stacked figure of daily series from the `gret` frame
# (returns, rating, price, sentiment; the fifth row is configured further on).
gret.index = pd.to_datetime(gret.index)
# Use mdates.date2num for consistency with the locator/formatter calls below,
# instead of going through the `mpl.dates` alias.
date = mdates.date2num(gret.index.to_pydatetime())

retur = gret["bad"].values
price = gret["price"].values
stars = gret["rol_mean"].values
sent = gret["rol_sent"].values
reviews = gret["reviews"].values

t = date
objects = gret.index
y_pos = np.arange(len(objects))

fig, axs = plt.subplots(5, 1, figsize=(16.5, 10))

# Row 0 is a bar chart of returns; rows 1-3 are line plots.
axs[0].bar(t, retur, align='center', color="gray")
axs[0].set_ylabel('Returns')

for ax, series, label in ((axs[1], stars, 'Avg. rating'),
                          (axs[2], price, 'Price in [$]'),
                          (axs[3], sent, 'Avg. sentiment')):
    ax.plot(t, series, color="gray")
    ax.set_ylabel(label)

# Shared axis cosmetics: grid plus month ticks labelled with the month name.
for ax in axs[:4]:
    ax.grid(True)
    ax.xaxis.set_major_locator(mdates.MonthLocator())
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%B'))
def output_graph(datatype, region_schema='statewide', agerange_filter=None,
                 region_filter=None, value_filter=None, state_filter=None,
                 append_to_name=None):
    """Plot one line per series returned by read_csv() and save it as a PNG.

    :param datatype: metric name; also used in the y label and output filename
    :param region_schema: schema passed through to read_csv()
    :param agerange_filter: optional age-range filter for read_csv()
    :param region_filter: optional region filter for read_csv()
    :param value_filter: optional value filter for read_csv()
    :param state_filter: a single state name, a tuple of names, or None
    :param append_to_name: extra text appended to the y label / filename
    """
    # Allow a single state to be passed as a plain string.
    if isinstance(state_filter, str):
        state_filter = (state_filter, )

    plt.figure(figsize=(10, 8), dpi=80)

    max_y = 0
    for x, (k, v) in enumerate(
            read_csv(region_schema, datatype, agerange_filter,
                     region_filter, value_filter, state_filter).items()):
        print(k)
        X = np.array([i[0] for i in v])
        Y = [i[1] for i in v]
        if Y:
            # Track the overall maximum for the (currently disabled) log-scale test.
            max_y = max(max_y, max(Y))
        # Colors cycle fastest; marker/linestyle advance each time the palette wraps.
        plt.plot(X, Y,
                 color=COLORS[x % len(COLORS)],
                 label=k,
                 marker=MARKERS[x // len(COLORS)],
                 linestyle=STYLES[x // len(COLORS)])

    # Build the label/filename as "<datatype> (<states>) (<suffix>)".
    # (The original combined an f-string prefix with %-formatting; the f prefix
    # did nothing there — use real f-strings.)
    y_label = (f"{datatype} ({','.join(state_filter)})"
               if state_filter else datatype)
    y_label = (f'{y_label} ({append_to_name})'
               if append_to_name else y_label)

    fontP = FontProperties()
    fontP.set_size('small')

    ax = plt.gca()
    ax.xaxis_date()
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%d/%m"))

    plt.xlabel('Date')
    plt.ylabel(y_label)

    # NOTE(review): log scaling is deliberately disabled by the trailing
    # "and False"; left in place so it can be re-enabled by removing it.
    if max_y > 50 and datatype != 'new' and False:
        plt.yscale('log')
        for axis in [ax.yaxis]:
            axis.set_major_formatter(ScalarFormatter())
            formatter = axis.get_major_formatter()
            axis.set_minor_formatter(formatter)

    plt.legend(prop=fontP)
    plt.grid()
    plt.savefig(GRAPH_OUTPUT_DIR / f'{y_label}.png')
    plt.clf()
def burndown_chart_for_sprint(self, sprint):
    """Draw a two-panel burndown chart for a sprint and show it.

    Top panel: hours remaining per day with ideal/actual burn lines.
    Bottom panel: hours logged per day versus the required daily burn.

    Returns True once the figure has been shown, or False when the sprint
    has no time entries to plot.
    """

    def _day(date):
        # Truncate a datetime to midnight of the same day.
        return datetime(date.year, date.month, date.day)

    import matplotlib.dates as mdates
    fmt = mdates.DateFormatter('%Y-%m-%d')
    days = mdates.DayLocator()

    sprint = self.resolve_sprint(sprint)
    entries = self.get_entries_in_sprint(sprint)
    if len(entries) == 0:
        # Nothing logged yet -- nothing to draw.
        return False

    length = self.get_expected_hours_for_sprint(sprint)
    start, end = self.get_dates_from_sprint(sprint)

    # One slot per calendar day of the sprint (86400 s per day), inclusive of `end`.
    time_array = np.arange(
        _day(start).timestamp(),
        _day(end).timestamp() + 86400., 86400.)
    hours_left = np.ones(len(time_array)) * length
    hours = np.zeros(len(time_array))
    for e in entries:
        # e[2] appears to be the entry timestamp and e[4] its duration in
        # hours (assumed from usage -- confirm against get_entries_in_sprint()).
        d = self.to_datetime(e[2])
        idx = np.searchsorted(time_array, d.timestamp())
        hours_left[idx:] -= e[4]  # burn down this day and every later day
        hours[idx] += e[4]        # accumulate per-day effort

    ideal_burn = self.get_ideal_burn(sprint)
    actual_burn = self.get_actual_burn(sprint)
    # Extrapolate when work would finish at the current pace; NaN when no
    # work has been logged (actual_burn == 0).
    projected_completion = start.timestamp(
    ) + 86400 * length / actual_burn if actual_burn != 0. else np.nan
    required_burn = self.get_required_burn(sprint)

    dates = [datetime.fromtimestamp(t) for t in time_array]
    last = datetime.fromtimestamp(_day(end).timestamp() + 86400.)
    # Right edge of the x axis: the later of sprint end and projected
    # completion, but never more than 5 days past the sprint end.
    x_max = datetime.fromtimestamp(
        min(max(projected_completion, _day(end).timestamp() + 86400.),
            _day(end).timestamp() + 5 * 86400.))

    # Future days get zero hours-remaining bars and are collected so the
    # bottom panel can draw the required-burn goal over them.
    days_left = []
    for i, d in enumerate(dates):
        if d.timestamp() > datetime.today().timestamp():
            days_left.append(d)
            hours_left[i:] = 0.

    fig, (ax, ax2) = plt.subplots(nrows=2, ncols=1, sharex=True, figsize=(12, 12))

    # --- top panel: hours remaining -------------------------------------
    ax.bar(dates, hours_left, align='edge', alpha=0.5)
    ax.plot([dates[0], last], [length, 0.], ls='--', lw=3, c='black',
            label='ideal burn {:.1f} hr/day'.format(ideal_burn))
    ax.plot(
        [dates[0], datetime.fromtimestamp(projected_completion)],
        [length, 0.], ls='--', lw=3, c='blue',
        label='actual burn {:.1f} hr/day'.format(actual_burn))
    ax.vlines(datetime.today(), 0., length, color='green', lw=3, label='today')
    ax.legend()
    ax.set_title("Burndown for {}\nRunning {} to {}".format(
        self.get_sprint_name_from_sprint(sprint), dates[0].date(),
        dates[-1].date()))
    ax.xaxis.set_major_formatter(fmt)
    ax.xaxis.set_major_locator(days)
    ax.grid(True)
    ax.set_xlim(dates[0], x_max)
    ax.set_ylabel('hours')

    # --- bottom panel: hours logged per day -----------------------------
    ax2.bar(dates, hours, align='edge', alpha=0.5)
    if len(days_left) > 0:
        # Show the required daily burn over the remaining days.
        ax2.bar(days_left, required_burn * np.ones(len(days_left)),
                alpha=0.5, align='edge',
                label='goal {:.1f} hr/day'.format(required_burn))
    ax2.vlines(datetime.today(), 0., length, color='green', lw=3)
    ax2.set_title("Hours per day\nDaily hourly gain {:.1f}".format(
        self.get_daily_gain_in_sprint(sprint)))
    ax2.xaxis.set_major_formatter(fmt)
    ax2.xaxis.set_major_locator(days)
    ax2.grid(True)
    ax2.set_xlim(dates[0], x_max)
    ax2.set_ylabel('hours')
    ax2.set_xlabel('date')
    if len(days_left) > 0:
        ax2.legend()

    fig.autofmt_xdate()
    plt.tight_layout()
    plt.show()
    return True
# Frameless time-series plot of average weekday DublinBikes usage, with a
# +/- one-standard-deviation band around the mean.
plt.figure(figsize=(10, 7.5))
ax = plt.subplot(111)

# Strip the plot frame and keep ticks only on the bottom/left edges.
for side in ("top", "bottom", "right", "left"):
    ax.spines[side].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()

plt.ylabel("Bikes in use", fontsize=15)
plt.title("DublinBikes average weekday usage", fontsize=22)
plt.xlabel("\nData source: CityBikes http://api.citybik.es/ | "
           "Author: James Lawlor @lawlorino", fontsize=10)

mean, std = df['mean'], df['std']
ax.plot(ts, mean, color='black')
ax.fill_between(ts, mean - std, mean + std, facecolor='blue', alpha=0.1)

plt.xlim(ts[0], ts[-1])
ax.set_ylim(bottom=0)

# Hour:minute ticks every two hours.
ax.xaxis.set_major_locator(dates.HourLocator(interval=2))
ax.xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))
plt.show()
})  # closes a vars_to_save dict started before this chunk
savemat(os.path.join(TEMPORARY_DB, 'db_HighFreqVolumeTime'), vars_to_save)
# -
# ## Generate a figure showing the microprice and the total exchanged volume
# ## as functions of wall clock time and volume time

# axes settings: 3 time ticks, 5 price ticks, 3 volume ticks
timegrid = [date_mtop(i) for i in linspace(t_ms[0], t_ms[-1], 3)]
pgrid_min = np.nanmin(p_mic)
pgrid_max = np.nanmax(p_mic)
pgrid = linspace(pgrid_min, pgrid_max, 5)
# Volume grid from the positive entries only, padded by one unit each side.
volgrid_min = np.nanmin(q_t[0, q_t[0] > 0]) - 1
volgrid_max = np.nanmax(q_t[0, q_t[0] > 0]) + 1
volgrid = linspace(volgrid_min, volgrid_max, 3)
myFmt = mdates.DateFormatter('%H:%M:%S')
t_ms_dt = array([date_mtop(i) for i in t_ms])

f, ax = subplots(2, 2)

# left-top plot: microprice vs wall-clock time
ax[0, 0].plot(t_ms_dt, p_mic[0], c='r', lw=1)
ax[0, 0].set_xticks(timegrid)
ax[0, 0].set_yticks(pgrid)
ax[0, 0].yaxis.set_major_formatter(FormatStrFormatter('%.3f'))
ax[0, 0].xaxis.set_major_formatter(myFmt)
ax[0, 0].axis([min(t_ms_dt), max(t_ms_dt), pgrid_min, pgrid_max])
ax[0, 0].set_ylabel('Microprice')
ax[0, 0].set_xlabel('Wall Clock Time')
ax[0, 0].set_title('Time evolution')
plt.grid(True)

# right-top plot (configured further on; this chunk ends mid-setup)
ax[0, 1].set_xticks(volgrid)
ax[0, 1].set_yticks(pgrid)
# Collect the creation timestamp of every tweet in the (already open) file `f`.
for line in f:
    tweet = json.loads(line)
    all_dates.append(tweet.get('created_at'))

# One sample per tweet, indexed by its timestamp.
idx = pd.DatetimeIndex(all_dates)
ones = np.ones(len(all_dates))
my_series = pd.Series(ones, index=idx)

# Resampling/bucketing into 1-minute buckets gives tweets-per-minute counts.
per_minute = my_series.resample('1Min').sum().fillna(0)

# Plot the series with 20-minute ticks over a fixed two-hour window.
fig, ax = plt.subplots()
ax.grid(True)
ax.set_title("Tweet Frequencies")

tick_locator = mdates.MinuteLocator(interval=20)
tick_formatter = mdates.DateFormatter('%H:%M')
ax.xaxis.set_major_locator(tick_locator)
ax.xaxis.set_major_formatter(tick_formatter)
ax.set_xlim(datetime(2020, 1, 9, 10, 0), datetime(2020, 1, 9, 12, 0))

max_freq = per_minute.max()
ax.set_ylim(0, max_freq)
ax.plot(per_minute.index, per_minute)

plt.savefig('tweet_time_series.png')
# Concatenate the observed input/target windows for plotting.
check_XY = np.concatenate((check_X0, check_Y), axis=1)

# Restore the trained model and run it on the held-out check set.
with tf.Session() as sess:
    saver.restore(sess, dir0 + "final.ckpt")
    check_outputs = sess.run(outputs, feed_dict={X: check_X, Y: check_Y})

# Per-example mean squared error over the prediction horizon.
check_mse = ((check_Y - check_outputs)**2).mean(axis=1)

# One subplot per check example: observations, tide input, and prediction.
fig, axes = plt.subplots(n_check, 1, figsize=(9, 15))
for i in range(n_check):
    ax = axes[i]
    ax.plot(check_XY_dt[i], check_XY[i], 'k.-', label='obs')
    ax.plot(check_X_dt[i], check_X1[i], 'b.-', label='tid')
    ax.plot(check_Y_dt[i], check_outputs[i], 'r.-', label='prediction')
    # Day ticks with hour minor ticks, labelled "Mon DD".
    days = mdates.DayLocator()
    hours = mdates.HourLocator()
    dt_fmt = mdates.DateFormatter('%b %d')
    ax.xaxis.set_major_locator(days)
    ax.xaxis.set_major_formatter(dt_fmt)
    ax.xaxis.set_minor_locator(hours)
    ax.set_title('mse = {:0.4f}'.format(check_mse[i]))
    ax.legend(loc=2)
    # Summary label repeated on each panel (station/feature/window sizes).
    ax.set_xlabel(
        '{:s} {:s} X:{:d}h Y:{:d}h Overall test mse = {:0.4f}m'.format(
            station, feature, x_len, y_len, mse_test), weight='bold')

fig.tight_layout()
fig.savefig(dir0 + 'check.png', format='png', dpi=300)
plt.close(fig)

#%% MSE plot
with tf.Session() as sess:
def get_ticks(start, end):
    """Pick an x-axis tick locator and date formatter for the span start..end.

    Shorter spans get finer ticks (minutes) and longer spans coarser ones
    (months); returns a (locator, formatter) pair ready for an x axis.
    """
    from datetime import timedelta as td

    delta = end - start

    # (max span, locator factory, strftime format) -- first matching row wins.
    rules = (
        (td(minutes=10), lambda: mdt.MinuteLocator(), '%H:%M'),
        (td(minutes=30), lambda: mdt.MinuteLocator(byminute=range(0, 60, 5)), '%H:%M'),
        (td(hours=1), lambda: mdt.MinuteLocator(byminute=range(0, 60, 15)), '%H:%M'),
        (td(hours=6), lambda: mdt.HourLocator(), '%H:%M'),
        (td(days=1), lambda: mdt.HourLocator(byhour=range(0, 24, 3)), '%H:%M'),
        (td(days=3), lambda: mdt.HourLocator(byhour=range(0, 24, 12)), '%d/%m %H'),
        (td(weeks=2), lambda: mdt.DayLocator(), '%d/%m'),
        (td(weeks=12), lambda: mdt.WeekdayLocator(), '%d/%m'),
        (td(weeks=104), lambda: mdt.MonthLocator(), '%d/%m'),
        (td(weeks=208), lambda: mdt.MonthLocator(interval=3), '%d/%m/%y'),
    )
    for limit, make_locator, fmt_str in rules:
        if delta <= limit:
            return make_locator(), mdt.DateFormatter(fmt_str)

    # Anything longer than ~4 years: one tick every six months.
    return mdt.MonthLocator(interval=6), mdt.DateFormatter('%d/%m/%y')
def animate(i):
    """FuncAnimation callback: fetch fresh exchange data and redraw the chart.

    Behaviour is driven by module-level UI state (chartLoad, paneCount,
    DataPace, exchange, the indicator selections, colors, etc.). In tick
    mode it plots raw trades; otherwise it builds OHLC candles from the
    seaofbtc API, throttled by DatCounter.
    """
    global refreshRate
    global DatCounter

    def rsiIndicator(priceData, location="top"):
        # Fetch RSI values for priceData from the seaofbtc API and plot them
        # on the top (a0) or bottom (a3) indicator axes.
        try:
            if location == "top":
                values = {
                    'key': 1,
                    "prices": priceData,
                    "periods": topIndicator[1]
                }
            if location == "bottom":
                values = {
                    'key': 1,
                    "prices": priceData,
                    "periods": bottomIndicator[1]
                }
            url = "http://seaofbtc.com/api/indicator/rsi"
            data = urllib.parse.urlencode(values)
            data = data.encode("utf-8")
            req = urllib.request.Request(url, data)
            resp = urllib.request.urlopen(req)
            respData = resp.read()
            # Crude cleanup of the bytes repr -- NOTE(review): .replace("b", "")
            # would also strip any literal 'b' characters from the payload.
            newData = str(respData).replace("b", "").replace("[", "").replace(
                "]", "").replace("'", "")
            priceList = newData.split(', ')
            rsiData = [float(i) for i in priceList]
            if location == "top":
                a0.plot_date(OHLC['MPLDates'], rsiData, lightColor, label="RSI")
            if location == "bottom":
                a3.plot_date(OHLC['MPLDates'], rsiData, lightColor, label="RSI")
        except Exception as e:
            print("failed in rsi", str(e))

    if chartLoad:
        if paneCount == 1:
            if DataPace == "tick":
                # --- tick mode: plot raw trades per exchange -------------
                try:
                    if exchange == "BTC-e":
                        a = plt.subplot2grid((6, 4), (0, 0), rowspan=5, colspan=4)
                        a2 = plt.subplot2grid((6, 4), (5, 0), rowspan=1, colspan=4, sharex=a)
                        dataLink = 'https://btc-e.com/api/3/trades/btc_usd?limit=2000'
                        data = urllib.request.urlopen(dataLink)
                        # NOTE(review): HTTPResponse has .read(), not
                        # .readall() -- this likely raises and is swallowed
                        # by the except below. Confirm the intended API.
                        data = data.readall().decode("utf-8")
                        data = json.loads(data)
                        data = data["btc_usd"]
                        data = pd.DataFrame(data)
                        data["datestamp"] = np.array(
                            data['timestamp']).astype("datetime64[s]")
                        allDates = data["datestamp"].tolist()
                        # Split trades into bid (buy) and ask (sell) sides.
                        buys = data[(data['type'] == "bid")]
                        buyDates = (buys["datestamp"]).tolist()
                        sells = data[(data['type'] == "ask")]
                        sellDates = (sells["datestamp"]).tolist()
                        volume = data["amount"]
                        a.clear()
                        a.plot_date(buyDates, buys["price"], lightColor, label="buys")
                        a.plot_date(sellDates, sells["price"], darkColor, label="sells")
                        a2.fill_between(allDates, 0, volume, facecolor=darkColor)
                        a.xaxis.set_major_locator(mticker.MaxNLocator(5))
                        a.xaxis.set_major_formatter(
                            mdates.DateFormatter("%Y-%m-%d %H:%M:%S"))
                        plt.setp(a.get_xticklabels(), visible=False)
                        a.legend(bbox_to_anchor=(0, 1.02, 1, .102), loc=3,
                                 ncol=2, borderaxespad=0)
                        title = "BTC-e BTCUSD Prices\nLast Price: " + str(
                            data["price"][1999])
                        a.set_title(title)
                        priceData = data['price'].apply(float).tolist()

                    if exchange == "Bitstamp":
                        a = plt.subplot2grid((6, 4), (0, 0), rowspan=5, colspan=4)
                        a2 = plt.subplot2grid((6, 4), (5, 0), rowspan=1, colspan=4, sharex=a)
                        dataLink = 'https://www.bitstamp.net/api/transactions/'
                        data = urllib.request.urlopen(dataLink)
                        # NOTE(review): same .readall() concern as above.
                        data = data.readall().decode("utf-8")
                        data = json.loads(data)
                        data = pd.DataFrame(data)
                        data["datestamp"] = np.array(
                            data['date'].apply(int)).astype("datetime64[s]")
                        dateStamps = data["datestamp"].tolist()
                        volume = data["amount"].apply(float).tolist()
                        a.clear()
                        # Bitstamp's feed is not split by side here, so all
                        # trades are plotted under the "buys" label.
                        a.plot_date(dateStamps, data["price"], lightColor, label="buys")
                        a2.fill_between(dateStamps, 0, volume, facecolor=darkColor)
                        a.xaxis.set_major_locator(mticker.MaxNLocator(5))
                        a.xaxis.set_major_formatter(
                            mdates.DateFormatter("%Y-%m-%d %H:%M:%S"))
                        plt.setp(a.get_xticklabels(), visible=False)
                        a.legend(bbox_to_anchor=(0, 1.02, 1, .102), loc=3,
                                 ncol=2, borderaxespad=0)
                        title = "Bitstamp BTCUSD Prices\nLast Price: " + str(
                            data["price"][0])
                        a.set_title(title)
                        priceData = data['price'].apply(float).tolist()

                    if exchange == "Bitfinex":
                        a = plt.subplot2grid((6, 4), (0, 0), rowspan=5, colspan=4)
                        a2 = plt.subplot2grid((6, 4), (5, 0), rowspan=1, colspan=4, sharex=a)
                        dataLink = 'https://api.bitfinex.com/v1/trades/btcusd?limit=2000'
                        data = urllib.request.urlopen(dataLink)
                        # NOTE(review): same .readall() concern as above.
                        data = data.readall().decode("utf-8")
                        data = json.loads(data)
                        data = pd.DataFrame(data)
                        data["datestamp"] = np.array(
                            data['timestamp']).astype("datetime64[s]")
                        allDates = data["datestamp"].tolist()
                        buys = data[(data['type'] == "buy")]
                        buyDates = (buys["datestamp"]).tolist()
                        sells = data[(data['type'] == "sell")]
                        sellDates = (sells["datestamp"]).tolist()
                        volume = data["amount"].apply(float).tolist()
                        a.clear()
                        a.plot_date(buyDates, buys["price"], lightColor, label="buys")
                        a.plot_date(sellDates, sells["price"], darkColor, label="sells")
                        a2.fill_between(allDates, 0, volume, facecolor=darkColor)
                        a.xaxis.set_major_locator(mticker.MaxNLocator(5))
                        a.xaxis.set_major_formatter(
                            mdates.DateFormatter("%Y-%m-%d %H:%M:%S"))
                        plt.setp(a.get_xticklabels(), visible=False)
                        a.legend(bbox_to_anchor=(0, 1.02, 1, .102), loc=3,
                                 ncol=2, borderaxespad=0)
                        title = "Bitfinex BTCUSD Prices\nLast Price: " + str(
                            data["price"][0])
                        a.set_title(title)
                        priceData = data['price'].apply(float).tolist()

                    if exchange == "Huobi":
                        # Huobi has no tick feed here; fall back to 1-day data
                        # from the seaofbtc API on a single full-height axes.
                        a = plt.subplot2grid((6, 4), (0, 0), rowspan=6, colspan=4)
                        data = urllib.request.urlopen(
                            'http://seaofbtc.com/api/basic/price?key=1&tf=1d&exchange='
                            + programName).read()
                        data = data.decode()
                        data = json.loads(data)
                        dateStamp = np.array(data[0]).astype("datetime64[s]")
                        dateStamp = dateStamp.tolist()
                        df = pd.DataFrame({'Datetime': dateStamp})
                        df['Price'] = data[1]
                        df['Volume'] = data[2]
                        df['Symbol'] = "BTCUSD"
                        df['MPLDate'] = df['Datetime'].apply(
                            lambda date: mdates.date2num(date.to_pydatetime()))
                        df = df.set_index("Datetime")
                        lastPrice = df["Price"][-1]
                        a.plot_date(df['MPLDate'][-4500:], df['Price'][-4500:],
                                    lightColor, label="price")
                        a.xaxis.set_major_locator(mticker.MaxNLocator(5))
                        a.xaxis.set_major_formatter(
                            mdates.DateFormatter("%Y-%m-%d %H:%M:%S"))
                        title = "Huobi BTCUSD Prices\nLast Price: " + str(
                            lastPrice)
                        a.set_title(title)
                        # NOTE(review): the column is 'Price' (capital P);
                        # df['price'] raises KeyError, silently caught below.
                        priceData = df['price'].apply(float).tolist()
                except Exception as e:
                    print("Failed because of:", e)

            else:
                # --- candle mode: rebuild every 13th frame ----------------
                if DatCounter > 12:
                    try:
                        # Lay out the axes grid depending on which indicators
                        # are enabled (a=price, a2=volume, a0=top, a3=bottom).
                        if exchange == "Huobi":
                            if topIndicator != "none":
                                a = plt.subplot2grid((6, 4), (1, 0), rowspan=5, colspan=4)
                                a2 = plt.subplot2grid((6, 4), (0, 0), sharex=a, rowspan=1, colspan=4)
                            else:
                                a = plt.subplot2grid((6, 4), (0, 0), rowspan=6, colspan=4)
                        else:
                            if topIndicator != "none" and bottomIndicator != "none":
                                # Main Graph
                                a = plt.subplot2grid((6, 4), (1, 0), rowspan=3, colspan=4)
                                # Volume
                                a2 = plt.subplot2grid((6, 4), (4, 0), sharex=a, rowspan=1, colspan=4)
                                # Bottom Indicator
                                a3 = plt.subplot2grid((6, 4), (5, 0), sharex=a, rowspan=1, colspan=4)
                                # Top Indicator
                                a0 = plt.subplot2grid((6, 4), (0, 0), sharex=a, rowspan=1, colspan=4)
                            elif topIndicator != "none":
                                # Main Graph
                                a = plt.subplot2grid((6, 4), (1, 0), rowspan=4, colspan=4)
                                # Volume
                                a2 = plt.subplot2grid((6, 4), (5, 0), sharex=a, rowspan=1, colspan=4)
                                # Top Indicator
                                a0 = plt.subplot2grid((6, 4), (0, 0), sharex=a, rowspan=1, colspan=4)
                            elif bottomIndicator != "none":
                                # Main Graph
                                a = plt.subplot2grid((6, 4), (0, 0), rowspan=4, colspan=4)
                                # Volume
                                a2 = plt.subplot2grid((6, 4), (4, 0), sharex=a, rowspan=1, colspan=4)
                                # Bottom Indicator
                                a3 = plt.subplot2grid((6, 4), (5, 0), sharex=a, rowspan=1, colspan=4)
                            else:
                                # Main Graph
                                a = plt.subplot2grid((6, 4), (0, 0), rowspan=5, colspan=4)
                                # Volume
                                a2 = plt.subplot2grid((6, 4), (5, 0), sharex=a, rowspan=1, colspan=4)

                        data = urllib.request.urlopen(
                            "http://seaofbtc.com/api/basic/price?key=1&tf="
                            + DataPace + "&exchange=" + programName).read()
                        data = data.decode()
                        data = json.loads(data)
                        dateStamp = np.array(data[0]).astype("datetime64[s]")
                        dateStamp = dateStamp.tolist()
                        df = pd.DataFrame({'Datetime': dateStamp})
                        df['Price'] = data[1]
                        df['Volume'] = data[2]
                        df['Symbol'] = 'BTCUSD'
                        df['MPLDate'] = df['Datetime'].apply(
                            lambda date: mdates.date2num(date.to_pydatetime()))
                        df = df.set_index("Datetime")
                        # NOTE(review): resample(..., how=...) was removed in
                        # pandas 0.18+; modern pandas needs .resample().ohlc()
                        # and .resample().sum() instead.
                        OHLC = df['Price'].resample(resampleSize, how="ohlc")
                        OHLC = OHLC.dropna()
                        volumeData = df['Volume'].resample(
                            resampleSize, how={'volume': 'sum'})
                        # Duplicate the index as float matplotlib dates.
                        OHLC["dateCopy"] = OHLC.index
                        OHLC["MPLDates"] = OHLC["dateCopy"].apply(
                            lambda date: mdates.date2num(date.to_pydatetime()))
                        del OHLC["dateCopy"]
                        volumeData["dateCopy"] = volumeData.index
                        volumeData["MPLDates"] = volumeData["dateCopy"].apply(
                            lambda date: mdates.date2num(date.to_pydatetime()))
                        del volumeData["dateCopy"]
                        priceData = OHLC['close'].apply(float).tolist()

                        a.clear()

                        # Moving-average overlays on the price axes.
                        if middleIndicator != "none":
                            for eachMA in middleIndicator:
                                if eachMA[0] == "sma":
                                    # NOTE(review): pd.rolling_mean was removed
                                    # in pandas 0.18+; use Series.rolling().mean().
                                    sma = pd.rolling_mean(
                                        OHLC["close"], eachMA[1])
                                    label = str(eachMA[1]) + " SMA"
                                    a.plot(OHLC["MPLDates"], sma, label=label)
                                if eachMA[0] == "ema":
                                    # NOTE(review): pd.stats.moments.ewma is
                                    # likewise removed; use Series.ewm().mean().
                                    ewma = pd.stats.moments.ewma
                                    label = str(eachMA[1]) + " EMA"
                                    a.plot(OHLC["MPLDates"],
                                           ewma(OHLC["close"], eachMA[1]),
                                           label=label)
                            a.legend(loc=0)

                        # Top/bottom indicator panes.
                        if topIndicator[0] == "rsi":
                            rsiIndicator(priceData, "top")
                        elif topIndicator == "macd":
                            try:
                                computeMACD(priceData, location="top")
                            except Exception as e:
                                print(str(e))
                        if bottomIndicator[0] == "rsi":
                            rsiIndicator(priceData, "bottom")
                        elif bottomIndicator == "macd":
                            try:
                                computeMACD(priceData, location="bottom")
                            except Exception as e:
                                print(str(e))

                        csticks = candlestick_ohlc(
                            a, OHLC[["MPLDates", "open", "high", "low",
                                     "close"]].values, width=candleWidth,
                            colorup=lightColor, colordown=darkColor)
                        a.set_ylabel("Price")

                        # Huobi has no separate volume pane in this layout.
                        if exchange != "Huobi":
                            a2.fill_between(volumeData["MPLDates"], 0,
                                            volumeData['volume'],
                                            facecolor=darkColor)
                            a2.set_ylabel("Volume")

                        a.xaxis.set_major_locator(mticker.MaxNLocator(3))
                        a.xaxis.set_major_formatter(
                            mdates.DateFormatter('%Y-%m-%d %H:%M'))
                        if exchange != "Huobi":
                            plt.setp(a.get_xticklabels(), visible=False)
                        if topIndicator != "none":
                            plt.setp(a0.get_xticklabels(), visible=False)
                        if bottomIndicator != "none":
                            plt.setp(a2.get_xticklabels(), visible=False)

                        # Title shows the last close for the selected window.
                        x = (len(OHLC['close'])) - 1
                        if DataPace == "1d":
                            title = exchange + " 1 Day Data with " + resampleSize + " Bars\nLast Price: " + str(
                                OHLC['close'][x])
                        if DataPace == "3d":
                            title = exchange + " 3 Day Data with " + resampleSize + " Bars\nLast Price: " + str(
                                OHLC['close'][x])
                        if DataPace == "7d":
                            title = exchange + " 7 Day Data with " + resampleSize + " Bars\nLast Price: " + str(
                                OHLC['close'][x])
                        if topIndicator != "none":
                            a0.set_title(title)
                        else:
                            a.set_title(title)

                        print("New Graph")
                        DatCounter = 0
                    except Exception as e:
                        # Force a long back-off after a failure.
                        print('failed in the non-tick animate:', str(e))
                        DatCounter = 9000
                else:
                    DatCounter += 1
# Vertical black lines marking two event windows (11-13 Sep and 8-10 Oct 2018),
# drawn the full height of the glider list.
ax.plot(np.tile(datetime(2018, 9, 11, 18, 0, 0), len(gliders) + 2),
        np.arange(-1, len(gliders) + 1), 'k')
ax.plot(np.tile(datetime(2018, 9, 13, 18, 0, 0), len(gliders) + 2),
        np.arange(-1, len(gliders) + 1), 'k')
ax.plot(np.tile(datetime(2018, 10, 8, 15, 0, 0), len(gliders) + 2),
        np.arange(-1, len(gliders) + 1), 'k')
ax.plot(np.tile(datetime(2018, 10, 10, 18, 0, 0), len(gliders) + 2),
        np.arange(-1, len(gliders) + 1), 'k')

# Legend: one handle per institution with its glider count.
ax.legend([h0[0],h1[0],h2[0],h3[0],h4[0],h5[0],h6[0]],['Navy - 30','NOAA - 21','NSF - 6','NJ - 2','FL - 1','BIOS - 1','TWR - 1'],\
    loc='center left',fontsize=20,bbox_to_anchor=(0, 0.4))

# Day-month tick labels on the time axis.
xfmt = mdates.DateFormatter('%d-%b')
ax.xaxis.set_major_formatter(xfmt)
ax.set_xlabel('2018 Date (DD-Month UTC)', fontsize=24)
# NOTE(review): `glider` here vs `gliders` above -- both names are used in
# this chunk; confirm they refer to the same sequence/length.
ax.set_ylim(-1, len(glider))
ax.grid(True)
plt.grid(color='k', linestyle='--', linewidth=1)

# Shaded rectangle spanning the 11-13 Sep window.
wd = datetime(2018, 9, 13, 18, 0, 0) - datetime(2018, 9, 11, 18, 0, 0)
rect = plt.Rectangle((datetime(2018, 9, 11, 18, 8, 0), -1), wd,
                     len(glider) + 1, color='k', alpha=0.3, zorder=10)
def graph_candlestick(self, symbol, chooser, start=None, end=None, minutes=1,
                      dtFormat="%H:%M", save='trade'):
    '''
    Retrieve intraday data via the APIChooser and draw a candlestick chart
    with a volume subplot, optional MA/VWAP overlays, and entry/exit markers.

    Set self.preferences to limit acceptable apis. To place tx markers, set
    (or clear) fp.entries and fp.exits prior to calling.

    :params symbol: The stock ticker
    :params chooser: APIChooser object
    :params start: A datetime object or time string for the begining of the
        graph. The day must be within the last 7 days. This may change in
        the future.
    :params end: A datetime object or time string for the end of a graph.
        Defaults to whatever the call gets.
    :params minutes: Candle interval in minutes.
    :params dtFormat: A strftime format to display the dates on the x axis
        of the chart.
    :params save: Base filename for the saved chart; a numeric suffix is
        appended if the file already exists.
    :return: The path the figure was saved to, or None on failure.
    '''
    register_matplotlib_converters()
    start = pd.Timestamp(start)
    end = pd.Timestamp(end)
    if self.style:
        style.use(self.style)

    # ############### Prepare data ##############
    # Get the data and prepare the DataFrames from some stock api.
    meta, df, maDict = chooser.get_intraday(symbol, start=start, end=end,
                                            minutes=minutes)
    if df.empty:
        # On failure, meta may carry an error payload to persist for the UI.
        if not isinstance(meta, int):
            self.apiset.setValue('errorCode', str(meta['code']))
            self.apiset.setValue('errorMessage', meta['message'])
        return None
    df['date'] = df.index
    if len(df.index) > self.max_candles:
        print(f"Your graph would have {len(df.index)} candles. Please limit the dates or increse the candle size")
        return None

    # Convert timestamps to matplotlib float dates for candlestick_ohlc.
    df['date'] = df['date'].map(mdates.date2num)
    df_ohlc = df[['date', 'open', 'high', 'low', 'close']]
    df_volume = df[['date', 'volume']]
    # ############### End Prepare data ##############

    # ###### PLOT and Graph #######
    colup = self.chartSet.value('colorup', 'g')
    coldown = self.chartSet.value('colordown', 'r')
    ax1 = plt.subplot2grid((6, 1), (0, 0), rowspan=5, colspan=1)
    ax1.set_axisbelow(True)
    if self.gridlines[1]:
        ax1.grid(b=self.gridlines[0], which='major', axis=self.gridlines[1])
    ax2 = plt.subplot2grid((6, 1), (5, 0), rowspan=1, colspan=1, sharex=ax1)
    fig = plt.gcf()
    fig.subplots_adjust(hspace=0)

    # candle width is a percentage of a day
    width = (minutes * 35) / (3600 * 24)
    candlestick_ohlc(ax1, df_ohlc.values, width, colorup=colup,
                     colordown=coldown, alpha=.99)

    # Volume bars colored by candle direction (black for a doji).
    for date, volume, dopen, close in zip(df_volume.date.values,
                                          df_volume.volume.values,
                                          df_ohlc.open.values,
                                          df_ohlc.close.values):
        color = colup if close > dopen else 'k' if close == dopen else coldown
        ax2.bar(date, volume, width, color=color)
    # ###### END PLOT and Graph #######

    # ###### ENTRY MARKER STUFF #######
    markersize = self.chartSet.value('markersize', 90)
    edgec = self.chartSet.value('markeredgecolor', '#000000')
    alpha = float(self.chartSet.value('markeralpha', 0.5))
    tz = df_ohlc.index[0].tzinfo
    for entry in self.entries:
        # entry layout used here: entry[0]=price, entry[2]=side ('B'/sell),
        # entry[3]=time (str or Timestamp) -- confirm against the producer.
        e = entry[3]
        if isinstance(e, str):
            e = pd.Timestamp(start.strftime('%Y-%m-%d ') + e, tzinfo=tz)
        else:
            # Currently only finnhub usess tz aware dates, and that is only
            # after retrieving the data
            if e.tzinfo:
                e = e.tz_convert(tz)
            else:
                e = e.tz_localize(tz)
        # TODO: indexing the candle does not work if there is missing data
        # e.g. a halt
        candleIndex = int((e - df_ohlc.index[0]).total_seconds() / 60 // minutes)
        if candleIndex < 0 or candleIndex > (len(df_ohlc) - 1):
            continue
        x = df_ohlc.index[candleIndex]
        y = entry[0]
        if entry[2] == 'B':
            facec = self.chartSet.value('markercolorup', 'g')
            mark = '^'
        else:
            facec = self.chartSet.value('markercolordown', 'r')
            mark = 'v'
        sc = ax1.scatter(x, y, color=facec,
                         marker=markers.MarkerStyle(marker=mark,
                                                    fillstyle='full'),
                         s=markersize, zorder=10)
        sc.set_edgecolor(edgec)
        sc.set_alpha(alpha)
    # ###### END MARKER STUFF #######

    # #### TICKS-and ANNOTATIONS #####
    ax1.yaxis.tick_right()
    ax2.yaxis.tick_right()
    plt.setp(ax1.get_xticklabels(), visible=False)
    for label in ax2.xaxis.get_ticklabels():
        label.set_rotation(-45)
        label.set_fontsize(8)
    ax2.xaxis.set_major_formatter(mdates.DateFormatter(dtFormat))
    ax2.yaxis.set_major_formatter(FuncFormatter(self.volFormat))
    plt.locator_params(axis='y', tight=True, nbins=2)

    # Pick tick minutes from the candle interval and expected candle count.
    numcand = ((end - start).total_seconds() / 60) // minutes
    ax2.xaxis.set_major_locator(mdates.MinuteLocator(
        byminute=self.setticks(minutes, numcand)))

    # Watermark-style symbol/interval label ~39% along the x axis.
    # NOTE(review): the y anchor uses df_ohlc.low.max() -- verify that
    # low.max() (not high.max()) is the intended anchor.
    idx = int(len(df_ohlc.date) * .39)
    ax1.annotate(f'{symbol} {minutes} minute',
                 (df_ohlc.date[idx], df_ohlc.low.max()),
                 xytext=(0.4, 0.85), textcoords='axes fraction',
                 alpha=0.35, size=16)

    # annotate the data source.
    ax2.annotate(f'Data is from {chooser.api}', xy=(0.99, 0), xytext=(0, 10),
                 xycoords=('axes fraction', 'figure fraction'),
                 textcoords='offset points', size=7, ha='right', va='bottom')
    # #### END TICKS-and ANNOTATIONS #####

    # ###### ma, ema and vwap #######
    if maDict:
        maSetDict = getMASettings()
        for ma in maSetDict[0]:
            if ma not in maDict.keys():
                continue
            ax1.plot(df_ohlc.date, maDict[ma], lw=1,
                     color=maSetDict[0][ma][1], label=f'{ma}MA')
        if 'vwap' in maDict.keys():
            ax1.plot(df_ohlc.date, maDict['vwap'], lw=1,
                     color=maSetDict[1][0][1], label='VWAP')
        if self.legend:
            leg = ax1.legend()
            leg.get_frame().set_alpha(0.35)

    # #### Adjust margins and frame
    top = df_ohlc.high.max()
    bottom = df_ohlc.low.min()
    margin = (top - bottom) * .08
    ax1.set_ylim(bottom=bottom - margin, top=top + (margin * 2))
    ad = self.adjust
    plt.subplots_adjust(left=ad['left'], bottom=ad['bottom'],
                        right=ad['right'], top=ad['top'],
                        wspace=0.2, hspace=0)
    if self.chartSet.value('interactive', False, bool):
        plt.show()

    # Avoid clobbering an existing file: append "(n)" before the extension.
    count = 1
    saveorig = save
    while os.path.exists(save):
        s, ext = os.path.splitext(saveorig)
        save = '{}({}){}'.format(s, count, ext)
        count = count + 1
    fig.savefig(save)
    return save
def animate(i):
    """FuncAnimation callback: re-read the score log and redraw all three axes.

    Relies on module-level axes ax1/ax2/ax3 and on datetime/timedelta,
    mdates, and numpy being imported at module scope.
    """
    # Read the whole score log each frame; the with-block closes the handle
    # (the original leaked an open file every frame).
    with open('twitter_msft_score3-0.txt', 'r') as fh:
        lines = fh.read().split('\n')

    xs = []
    ys = []
    ss = []
    totalScore = 250  # running total starts from an arbitrary baseline
    for line in lines[-5000:]:  # only the most recent entries
        if len(line) > 1:
            x, score, timeStamp = line.split(',')
            parts = timeStamp.split(' ')
            # Rearrange ctime-style fields into "Mon DD YYYY HH:MM:SS".
            date = parts[1] + " " + parts[2] + " " + parts[5] + " " + parts[3]
            date_object = datetime.strptime(date, '%b %d %Y %H:%M:%S')
            # Fixed -4 h shift (UTC -> US Eastern daylight time).
            date_object = date_object + timedelta(hours=-4)
            xs.append(date_object)
            totalScore = totalScore + float(score)
            ys.append(totalScore)
            ss.append(score)

    # Top panel: cumulative score over time.
    ax1.clear()
    ax1.plot(xs, ys)
    ax1.set_xlabel('Time')
    ax1.set_ylabel('Total Score')
    ax1.set_title('"msft", "microsoft", "windows10"')
    ax1.xaxis.set_major_formatter(mdates.DateFormatter('%b %d %Y %H:%M'))
    for tick in ax1.get_xticklabels():
        tick.set_rotation(45)
        tick.set_horizontalalignment('right')

    # Middle panel: per-tweet score over time.
    ax2.clear()
    ax2.plot(xs, ss)
    ax2.set_xlabel('Time')
    ax2.set_ylabel('Score')
    ax2.xaxis.set_major_formatter(mdates.DateFormatter('%b %d %Y %H:%M'))
    for tick in ax2.get_xticklabels():
        tick.set_rotation(45)
        tick.set_horizontalalignment('right')

    # Bottom panel: histogram of scores.
    ax3.clear()
    scores = np.array(ss)
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # float is the documented replacement and yields the same float64 dtype.
    scores = scores.astype(float)
    ax3.hist(scores, 100, color='g', alpha=0.75)
    ax3.set_ylim(0, 100)
    ax3.set_xlim(-1, 1)
    ax3.set_xlabel('Score Distribution')
    ax3.set_ylabel('# of Tweets')
# Wallet value over time for the three strategies, on the shared datenum axis.
ax[1].plot(datenum_price_data, article_wallet, color="b", linewidth=0.5)
ax[1].plot(datenum_price_data, funding_wallet, color="r", linewidth=0.5)
ax[1].plot(datenum_price_data, combined_wallet, color="g", linewidth=0.5)

# label axes
ax[0].set_ylabel("Price")
ax[1].set_ylabel("Wallet")

# legend (order matches the three plot calls above)
ax[1].legend(["Article", "Funding", "Combined"])

# generate the time axes
plt.subplots_adjust(bottom=0.2)
plt.xticks(rotation=25)
# NOTE(review): this rebinds the name ax[0] to whatever axes is *current*
# (likely the last one touched, not the top panel), so the date formatter
# below may not be applied to the panel the name suggests -- verify intent.
ax[0] = plt.gca()
xfmt = md.DateFormatter('%Y-%m-%d %H:%M')
ax[0].xaxis.set_major_formatter(xfmt)
plt.gcf().set_size_inches(32, 18)

# save the plot
plt.savefig('plots/compare_sentiment.png', bbox_inches='tight')

# show the plot
plt.show()
def main(): days = readstkData(daylinefilespath, stock_b_code) # convert the datetime64 column in the dataframe to 'float days' days['date'] = pd.to_datetime(days['date']) days['date'] = mdates.date2num(days['date'].astype(dt.date)) #time_format = '%Y-%m-%d' #days['date']=[dt.datetime.strptime(i, time_format) for i in days['date']] Av1 = days['ma5'] Av2 = days['ma10'] #quotes = np.array(days) quotes = zip(days['date'], days['open'], days['high'], days['low'], days['close']) fig = plt.figure(facecolor='#07000d', figsize=(15, 10)) #fig = plt.figure() ax1 = plt.subplot2grid((6, 4), (1, 0), rowspan=4, colspan=4, axisbg='#07000d') candlestick_ohlc(ax1, quotes, width=.6, colorup='#ff1717', colordown='#53c156') Label1 = str(MA5) + ' SMA' Label2 = str(MA10) + ' SMA' ax1.plot(days.date, Av1, '#e1edf9', label=Label1, linewidth=1.5) ax1.plot(days.date, Av2, '#4ee6fd', label=Label2, linewidth=1.5) ax1.grid(True, color='w') ax1.xaxis.set_major_locator(mticker.MaxNLocator(10)) ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d')) ax1.yaxis.label.set_color("w") ax1.spines['bottom'].set_color("#5998ff") ax1.spines['top'].set_color("#5998ff") ax1.spines['left'].set_color("#5998ff") ax1.spines['right'].set_color("#5998ff") ax1.tick_params(axis='y', colors='w') ax1.tick_params(axis='x', colors='w') volumeMin = 0 ax1v = ax1.twinx() ax1v.fill_between(days.date, volumeMin, days.volume, facecolor='#00ffe8', alpha=.4) ax1v.axes.yaxis.set_ticklabels([]) ax1v.grid(False) ###Edit this to 3, so it's a bit larger ax1v.set_ylim(0, 3 * days.volume.values.max()) ax1v.spines['bottom'].set_color("#5998ff") ax1v.spines['top'].set_color("#5998ff") ax1v.spines['left'].set_color("#5998ff") ax1v.spines['right'].set_color("#5998ff") ax1v.tick_params(axis='x', colors='w') ax1v.tick_params(axis='y', colors='w') ax1.set_ylabel('Stock price and Volume') ax1.set_title(stock_b_code, color='w') plt.gca().yaxis.set_major_locator(mticker.MaxNLocator(prune='upper')) plt.legend(loc='best') plt.show()
if is3d: v=ds[var][:,-1,:] #extract just surface value else: v = ds[var][:] v = v[ind_list,:] y = np.arange(0,v.shape[1]) p = ax.pcolormesh(dt_list,y,np.transpose(v),cmap='rainbow') ax.text(0.1,0.9,var,color='black',fontweight='bold',transform=ax.transAxes) ax.set_ylabel('index along boundary') ax.get_xaxis().set_visible(False) if row==2: ax.get_xaxis().set_visible(True) ax.xaxis.set_major_formatter(mdates.DateFormatter("%b %d %Y")) plt.setp( ax.xaxis.get_majorticklabels(), rotation=30, ha="right",rotation_mode='anchor') ax.set_xlabel('Time',fontweight='bold') row+=1 col+=1 ds.close() plt.tight_layout() if is3d: out_fn = '/data0/ebrasseale/WQ_plots/ROMS_2018_BC_surf_'+var_name+'.png' else: out_fn = '/data0/ebrasseale/WQ_plots/ROMS_2018_BC_'+var_name+'.png' plt.savefig(out_fn)
def hurst_graph(stocks, maximum_chunk, minimum_chunk, r):
    """Plot a rolling Hurst-exponent chart for the spread between two stocks.

    Parameters
    ----------
    stocks : sequence
        Two stock objects exposing ``getValuesClose()``, ``getDates()`` and
        ``percent_change``; the Hurst exponent is computed on the difference
        of their percent changes.
    maximum_chunk : int
        Window length (in trading days) per Hurst estimate; also the stride.
    minimum_chunk : int
        Log base / smallest sub-chunk size for the R/S analysis.
    r : int
        Number of decimal places used by the many round() calls.
    """
    def hurst(data_set, minimum_chunk, r):
        # Classic rescaled-range (R/S) Hurst estimate, hand-rolled.
        def chunk_calc(data_set):
            # Trim the series from the front until its length is an exact
            # power of minimum_chunk; return that exponent and the trimmed
            # series.
            # NOTE(review): if log(len, minimum_chunk) < 1, int(n) == 0 and
            # 'n % int(n)' raises ZeroDivisionError -- confirm inputs are
            # long enough. Returns None (unpack error) if no length matches.
            for num in range(len(data_set)):
                data_set_adjusted = data_set[num:]
                n = (math.log(len(data_set_adjusted), minimum_chunk))
                if (n % int(n)) == 0.0:
                    return int(n), data_set_adjusted
                    break  # unreachable: follows the return

        def chunk_pad(it, size, padval=None):
            # Split iterable into fixed-size tuples, padding the last one.
            # NOTE(review): pad value None would break the arithmetic below
            # if padding ever occurs; presumably sizes always divide evenly.
            it = chain(iter(it), repeat(padval))
            return list(
                iter(lambda: tuple(islice(it, size)), (padval, ) * size))

        expo, data_set_a = chunk_calc(data_set)

        def chunks(data_set_a):
            def first_chunk(data_set_a):
                # R/S statistic over the whole (trimmed) series.
                mean = round(np.mean(data_set), r)
                sd = round(std(data_set), r)
                mean_centered_series = []
                cumulative_deviation = []
                for num in data_set_a:
                    mean_centered_series.append(round(num - mean, r))
                for num in range(len(mean_centered_series)):
                    cumulative_deviation.append(
                        round(sum(mean_centered_series[:num]), r))
                Range = round(
                    max(cumulative_deviation) - min(cumulative_deviation), r)
                rescaled_range = round((Range / sd), r)
                log_of_rs = round(log(rescaled_range), r)
                log_of_size = round(log(len(data_set_a)), r)
                return log_of_rs, log_of_size

            list_of_log_of_rs = []
            list_of_log_of_size = []
            # NOTE(review): first_chunk is evaluated twice; the second call
            # repeats all the work just to pick the other tuple element.
            list_of_log_of_rs.append(first_chunk(data_set_a)[0])
            list_of_log_of_size.append(first_chunk(data_set_a)[1])

            # Repeat the R/S statistic at successively smaller chunk sizes
            # (len / minimum_chunk**num for num = 1 .. expo-1).
            for num in (list(range(expo + 1))[1:-1]):
                Ranges = []  # NOTE(review): never used
                rescaled_ranges = []
                denominater = minimum_chunk**num
                num_of_chunks = (int(int(len(data_set_a)) / denominater))
                for chunk in (chunk_pad(data_set_a, num_of_chunks)):
                    mean = round(np.mean(chunk), r)
                    sd = round(std(chunk), r)
                    mean_centered_series = []
                    cumulative_deviation = []
                    for n in chunk:
                        mean_centered_series.append(round(n - mean, r))
                    for n in range(len(chunk)):
                        cumulative_deviation.append(
                            round(sum(mean_centered_series[:n]), r))
                    Range = round(
                        max(cumulative_deviation) - min(cumulative_deviation), r)
                    rescaled_ranges.append(round(Range / sd, r))
                # Average R/S over the chunks of this size.
                avg_rescaled_range = round(
                    sum(rescaled_ranges) / denominater, r)
                list_of_log_of_rs.append(round(log(avg_rescaled_range), r))
                list_of_log_of_size.append(
                    round(log(len((chunk_pad(data_set_a, num_of_chunks))[0])), r))

            #plt.scatter(list_of_log_of_size,list_of_log_of_rs)
            #plt.show()
            #return list_of_log_of_rs,list_of_log_of_size

            def invertList(input_list):
                # In-place list reversal by swapping ends toward the middle.
                for item in range(len(input_list) // 2):
                    input_list[item], input_list[len(input_list) - 1 - item] = input_list[
                        len(input_list) - 1 - item], input_list[item]
                return input_list

            Y = invertList(list_of_log_of_rs)
            X = invertList(list_of_log_of_size)

            def best_fit(X, Y):
                # Least-squares slope/intercept; only the intercept 'a' is
                # returned and used as the Hurst estimate here.
                # NOTE(review): the Hurst exponent is conventionally the
                # SLOPE 'b' of log(R/S) vs log(size) -- confirm intent.
                xbar = sum(X) / len(X)
                ybar = sum(Y) / len(Y)
                n = len(X)  # or len(Y)
                numer = sum([xi * yi for xi, yi in zip(X, Y)]) - n * xbar * ybar
                denum = sum([xi**2 for xi in X]) - n * xbar**2
                b = numer / denum
                a = ybar - b * xbar
                return a

            return best_fit(X, Y)

        return np.absolute(chunks(data_set_a))

    def hurst_alt(ts):
        # Library-backed estimate (compute_Hc) used for large windows.
        H, c, val = compute_Hc(ts)
        return H

    hurst_values = []
    time_adjusted_dates = []
    counter1 = 0
    counter2 = maximum_chunk
    skipped = 0  # NOTE(review): never used

    # Slide a maximum_chunk-wide window over the close-price history and
    # compute one Hurst value per window on the pairwise spread.
    for period in list(range(0, len(stocks[0].getValuesClose()),
                             maximum_chunk))[1:]:
        difference = []
        for i in range(counter1, counter2):
            difference.append(stocks[0].percent_change[i] -
                              stocks[1].percent_change[i])
        time_adjusted_dates.append(stocks[0].getDates()[period])
        # Small windows use the hand-rolled R/S estimate; larger ones the
        # compute_Hc library routine.
        if maximum_chunk < 100:
            hurst_values.append(hurst(difference, minimum_chunk, r))
        else:
            hurst_values.append(hurst_alt(difference))
        counter1 += maximum_chunk
        counter2 += maximum_chunk

    plt.plot(time_adjusted_dates, hurst_values)
    plt.xlabel('Dates')
    plt.ylabel('Hurst Values')
    plt.title(str(maximum_chunk) + '-Day Hurst Chart')

    # dealing with the x-axis labels
    years = mdates.YearLocator()  # every year
    months = mdates.MonthLocator()  # every month
    years_fmt = mdates.DateFormatter('%Y')
    plt.gca().xaxis.set_major_locator(years)
    plt.gca().xaxis.set_major_formatter(years_fmt)
    plt.gca().xaxis.set_minor_locator(months)
    plt.gcf().autofmt_xdate()
    plt.show()
def do_plot(self, wallet, history): balance_Val = [] fee_val = [] value_val = [] datenums = [] unknown_trans = 0 pending_trans = 0 counter_trans = 0 balance = 0 for item in history: tx_hash, confirmations, value, timestamp, balance = item if confirmations: if timestamp is not None: try: datenums.append( md.date2num( datetime.datetime.fromtimestamp(timestamp))) balance_Val.append(1000. * balance / COIN) except [RuntimeError, TypeError, NameError] as reason: unknown_trans += 1 pass else: unknown_trans += 1 else: pending_trans += 1 value_val.append(1000. * value / COIN) if tx_hash: label, is_default_label = wallet.get_label(tx_hash) label = label.encode('utf-8') else: label = "" f, axarr = plt.subplots(2, sharex=True) plt.subplots_adjust(bottom=0.2) plt.xticks(rotation=25) ax = plt.gca() x = 19 test11 = "Unknown transactions = " + str( unknown_trans) + " Pending transactions = " + str( pending_trans) + " ." box1 = TextArea(" Test : Number of pending transactions", textprops=dict(color="k")) box1.set_text(test11) box = HPacker(children=[box1], align="center", pad=0.1, sep=15) anchored_box = AnchoredOffsetbox( loc=3, child=box, pad=0.5, frameon=True, bbox_to_anchor=(0.5, 1.02), bbox_transform=ax.transAxes, borderpad=0.5, ) ax.add_artist(anchored_box) plt.ylabel('mBOLI') plt.xlabel('Dates') xfmt = md.DateFormatter('%Y-%m-%d') ax.xaxis.set_major_formatter(xfmt) axarr[0].plot(datenums, balance_Val, marker='o', linestyle='-', color='blue', label='Balance') axarr[0].legend(loc='upper left') axarr[0].set_title('History Transactions') xfmt = md.DateFormatter('%Y-%m-%d') ax.xaxis.set_major_formatter(xfmt) axarr[1].plot(datenums, value_val, marker='o', linestyle='-', color='green', label='Value') axarr[1].legend(loc='upper left') # plt.annotate('unknown transaction = %d \n pending transactions = %d' %(unknown_trans,pending_trans),xy=(0.7,0.05),xycoords='axes fraction',size=12) plt.show()
# load some financial data; apple's stock price fh = cbook.get_sample_data('aapl.npy.gz') try: # Python3 cannot load python2 .npy files with datetime(object) arrays # unless the encoding is set to bytes. However this option was # not added until numpy 1.10 so this example will only work with # python 2 or with numpy 1.10 and later. r = np.load(fh, encoding='bytes') except TypeError: r = np.load(fh) fh.close() r = r[-250:] # get the last 250 days fig, ax = plt.subplots() ax.plot(r.date, r.adj_close) ax.xaxis.set_major_locator(dates.MonthLocator()) ax.xaxis.set_minor_locator(dates.MonthLocator(bymonthday=15)) ax.xaxis.set_major_formatter(ticker.NullFormatter()) ax.xaxis.set_minor_formatter(dates.DateFormatter('%b')) for tick in ax.xaxis.get_minor_ticks(): tick.tick1line.set_markersize(0) tick.tick2line.set_markersize(0) tick.label1.set_horizontalalignment('center') imid = len(r) // 2 ax.set_xlabel(str(r.date[imid].year)) plt.show()
# def zero_to_nan(values): # """Replace every 0 with 'nan' and return a copy.""" # return [float('nan') if x==0 else x for x in values] sb = DMG.get_group('SB').sum(axis=1) rg = DMG.get_group('Ryegrass').sum(axis=1) wc = DMG.get_group('Wclover').sum(axis=1) df2 = pd.DataFrame([rg, wc, sb]).T df2.columns = ['Ryegrass', 'Wclover', 'Barley'] # df2.index = df2.index.normalize() # df2.index = df2.index.floor(df2) # df2.mdates.DateFormatter('%Y-%m-%d') # Bar plot with ryegrass and clovergrass df2.plot.bar(stacked=True, figsize=(30, 5)) #plt.legend(handles=lines, fontsize='x-large', loc=2) plt.title('Harvest', fontsize=20, color='black') plt.ylabel('t DM /ha', fontsize=20) #plt.xaxis.set_xticks(df2.index) #plt.xaxis.set_major_formatter(mdates.DateFormatter("%Y-%m-%d")) #plt.xaxis.set_minor_formatter(mdates.DateFormatter("%Y-%m-%d")) #_=plt.xticks(rotation=90) # plt.legend(handles=lines, fontsize=20) # fig, ax = plt.subplots() ax.plot(df2.index, df2.values) ax.set_xticks(df2.index) ax.xaxis.set_major_formatter(mdates.DateFormatter("%Y-%m-%d")) ax.xaxis.set_minor_formatter(mdates.DateFormatter("%Y-%m-%d")) _ = plt.xticks(rotation=90)
iconfile = '/data/inscape/icon/experiments/nyalesund/iconforcing_23062017/METEOGRAM_patch004_awipev.nc' datafile = '/data/optimice/pamtra_runs/nyalesund/iconforcing_23062017_METEOGRAM_patch004_awipev.nc' plotpath = '/data/optimice/pamtra_runs/nyalesund/' iconfile = '/data/inscape/icon/experiments/fronts_postproc/METEOGRAM_patch004_joyce_26only.nc' datafile = '/data/optimice/pamtra_runs/fronts_pp/METEOGRAM_patch004_joyce26only.nc' plotpath = '/data/optimice/pamtra_runs/fronts_pp/' figsize21 = (18,12) figsize31 = (18,18) figsize41 = (18,24) figsize51 = (18,30) versus = -1 # Top Down versus = 1 # Bottom Up xfmt = md.DateFormatter('%m-%d %H') ylim=(0,8000) xDataLim = -2 def plot_variable(x,y,v,axes, xlab=None,ylab=None,vlab=None,title=None, vmin=None,vmax=None,xlim=None,ylim=None, cmap='jet', **kwargs): mesh = axes.pcolormesh(x,y,v,vmin=vmin,vmax=vmax,cmap=cmap, **kwargs) if title is not None: axes.text(0.1,0.9,title,transform=axes.transAxes,weight='black', bbox=dict(facecolor='white')) plt.colorbar(mesh,label=vlab,ax=axes) if xlab is not None: axes.set_xlabel(xlab) if ylab is not None:
print("XGBoost score on training set: ", rmse(y_test, y_pred)) # Run prediction on the test set. y_pred_xgb = regr.predict(test_df) y_pred = np.exp(y_pred_xgb) pred_df = pd.DataFrame(y_pred, index=test["id"], columns=["close"]) pred_df.to_csv('output.csv', header=True, index_label='id') #显示 dateparse = lambda dates:pd.datetime.strptime(dates,'%Y-%m-%d %H:%M:%S') data_pre = pd.read_csv('output.csv',encoding='utf-8',parse_dates=['id'],date_parser=dateparse) data_tru = pd.read_csv('test.csv',encoding='utf-8',parse_dates=['id'],date_parser=dateparse) table_pre = pd.pivot_table(data_pre,index=['id'],values=['close']) table_tru = pd.pivot_table(data_tru,index=['id'],values=['close']) fig = plt.figure() #生成axis对象 ax = fig.add_subplot(111) #本案例的figure中只包含一个图表 #设置x轴为时间格式,这句非常重要,否则x轴显示的将是类似于‘736268’这样的转码后的数字格式 ax.xaxis.set_major_formatter(mdate.DateFormatter('%Y-%m-%d %H:%M:%S')) #设置x轴坐标值和标签旋转45°的显示方式 plt.xticks(pd.date_range(table_tru.index[0],table_tru.index[-1],freq='min'),rotation=45) #x轴为table.index,y轴为价格 ax.plot(table_pre.index,table_pre['close'],color='r') ax.plot(table_tru.index,table_tru['close'],color='b') plt.show()
import matplotlib.dates as mdates import matplotlib.pyplot as plt plt.rcParams['figure.figsize'] = [20, 10] fig, ax = plt.subplots() x = virus_spain['fecha'] y = virus_spain['casos'] z = virus_spain['fallecimientos'] z1 = virus_spain['altas'] z2 = virus_spain['ingresos_uci'] z3 = virus_spain['hospitalizados'] ax.plot(x,y, linestyle='--', marker='x', color='b', label='Casos') ax.plot(x,z, linestyle='--', marker='x', color='r', label='Fallecimientos') ax.plot(x,z1, linestyle='--', marker='x', color='g', label='Altas') ax.plot(x,z2, linestyle='--', marker='x', color='y', label='Ingresos UCI') ax.plot(x,z3, linestyle='--', marker='x', label='Hospitalizados') ax.grid() plt.xticks(x) plt.legend() ax.xaxis.set_major_formatter(mdates.DateFormatter('%m-%d')) plt.savefig('sample.png') # In[ ]:
def adbl(request):
    """Django view: train a TF1 stacked-RNN price model on PLIC stock rows
    from PostgreSQL, plot actual vs. predicted prices, and return the plot
    as an image/jpeg HttpResponse.

    Parameters
    ----------
    request : django.http.HttpRequest
        Incoming request (unused beyond Django's view contract).
    """
    import numpy as np
    import pandas as pd
    import matplotlib.pyplot as plt
    from sklearn.preprocessing import MinMaxScaler
    from sklearn.model_selection import train_test_split
    from math import sqrt
    from datetime import timedelta
    import matplotlib.dates as mdates
    from sklearn.metrics import mean_squared_error
    import tensorflow as tf
    import django
    import warnings
    warnings.filterwarnings('ignore')

    def getfromdatabase():
        # Fetch OHLC + date rows for symbol 'PLIC', oldest first.
        conn = psycopg2.connect(host="localhost",
                                dbname="postgres",
                                user="******",
                                password="******")
        cur = conn.cursor()
        cur.execute(
            """select openprice,maxprice,minprice,closingprice,date from stockdatacopy where symbol = 'PLIC' order by date;"""
        )
        row = cur.fetchall()
        conn.commit()
        cur.close()
        # NOTE(review): the connection itself is never closed -- leak.
        return row

    # Reading the historical data of stocks from the web
    data = getfromdatabase()
    alphabet = "open high low close Date "
    columns = alphabet.split()  # split string into a list
    html_data = pd.DataFrame(
        data, columns=columns)  # load the dataset as a pandas data frame
    df = html_data.copy()
    # NOTE(review): positional 'axis' argument to drop() is deprecated in
    # newer pandas; axis=1 keyword is the safe form.
    df.drop(['Date'], 1, inplace=True)  # Dropping unnecessary columns

    # Overview plot of the four raw price series.
    # NOTE(review): savefig() after show() usually saves a blank figure.
    plt.figure(figsize=(15, 5))
    plt.plot(df.open.values, color='red', label='open')
    plt.plot(df.close.values, color='green', label='close')
    plt.plot(df.low.values, color='blue', label='low')
    plt.plot(df.high.values, color='black', label='high')
    plt.title('Stock price')
    plt.xlabel('time [days]')
    plt.ylabel('Price in rs')
    plt.legend(loc='best')
    plt.show()
    plt.savefig('trends.jpeg', format='jpeg')

    # Scale all four features to [0, 1] for the RNN.
    sc = MinMaxScaler()
    scaled_data = sc.fit_transform(df)

    tstep = 30  # look-back window length (comment said 60; code uses 30)
    data = []
    # create all possible sequences of length seq_len
    for i in range(len(scaled_data) - tstep):
        data.append(scaled_data[i:i + tstep])
    data = np.array(data)  # shape: (num_windows, tstep, 4)

    # Using 10% of data each for validation and test purpose
    valid_set_size = int(np.round(0.1 * data.shape[0]))
    test_set_size = valid_set_size
    train_set_size = data.shape[0] - 2 * valid_set_size

    # Creating Train data: inputs are the first tstep-1 rows of each window,
    # the target is the window's final row.
    x_train = data[:train_set_size, :-1, :]
    y_train = data[:train_set_size, -1, :]
    # Creating Validation data
    x_valid = data[train_set_size:train_set_size + valid_set_size, :-1, :]
    y_valid = data[train_set_size:train_set_size + valid_set_size, -1, :]
    # Creating Test data
    x_test = data[train_set_size + valid_set_size:, :-1, :]
    y_test = data[train_set_size + valid_set_size:, -1, :]

    index_in_epoch = 0
    perm_array = np.arange(x_train.shape[0])
    np.random.shuffle(perm_array)

    # function to get the next batch
    def next_batch(batch_size):
        # NOTE(review): 'global' refers to MODULE scope, but index_in_epoch,
        # x_train and perm_array are locals of adbl() -- this looks like a
        # notebook leftover and will raise NameError unless module-level
        # names exist; 'nonlocal' was probably intended. Confirm.
        global index_in_epoch, x_train, perm_array
        start = index_in_epoch
        index_in_epoch += batch_size
        if index_in_epoch > x_train.shape[0]:
            np.random.shuffle(perm_array)  # shuffle permutation array
            start = 0  # start next epoch
            index_in_epoch = batch_size
        end = index_in_epoch
        return x_train[perm_array[start:end]], y_train[perm_array[start:end]]

    # 4 features
    num_inputs = 4
    # Num of steps in each batch
    num_time_steps = tstep - 1
    # 100 neuron layer
    num_neurons = 200
    num_outputs = 4
    learning_rate = 0.001
    # how many iterations to go through (training steps)
    num_train_iterations = 100
    # Size of the batch of data
    batch_size = 50
    # number of LSTM layers
    n_layers = 2

    # Creating Placeholders for X and y.
    # The shape for these placeholders should be [None,num_time_steps-1,num_inputs] and [None, num_time_steps-1, num_outputs]
    # The reason we use num_time_steps-1 is because each of these will be one step shorter than the original time steps size,
    # because we are training the RNN network to predict one point into the future based on the input sequence.
    X = tf.placeholder(tf.float32, [None, num_time_steps, num_inputs])
    y = tf.placeholder(tf.float32, [None, num_outputs])

    # use Basic RNN Cell (despite comments mentioning LSTM)
    cell = [
        tf.contrib.rnn.BasicRNNCell(num_units=num_neurons,
                                    activation=tf.nn.elu)
        for layer in range(n_layers)
    ]
    # Creatinmg stacked LSTM
    multi_layer_cell = tf.contrib.rnn.MultiRNNCell(cell)
    # Now pass in the cells variable into tf.nn.dynamic_rnn, along with your first placeholder (X)
    outputs, states = tf.nn.dynamic_rnn(multi_layer_cell, X, dtype=tf.float32)

    # Project every timestep's hidden state to num_outputs, then keep only
    # the final timestep as the prediction.
    stacked_rnn_outputs = tf.reshape(outputs, [-1, num_neurons])
    stacked_outputs = tf.layers.dense(stacked_rnn_outputs, num_outputs)
    final_outputs = tf.reshape(stacked_outputs,
                               [-1, num_time_steps, num_outputs])
    final_outputs = final_outputs[:, num_time_steps -
                                  1, :]  # keep only last output of sequence

    # Create a Mean Squared Error Loss Function and use it to minimize an AdamOptimizer.
    loss = tf.reduce_mean(tf.square(final_outputs - y))  # MSE
    optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
    train = optimizer.minimize(loss)

    # Initializing the global variable
    init = tf.global_variables_initializer()
    train_set_size = x_train.shape[0]
    test_set_size = x_test.shape[0]
    saver = tf.train.Saver()

    # with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess:
    with tf.Session() as sess:
        sess.run(init)
        for iteration in range(
                int(num_train_iterations * train_set_size / batch_size)):
            x_batch, y_batch = next_batch(batch_size)
            sess.run(train, feed_dict={X: x_batch, y: y_batch})
            if iteration % 100 == 0:
                mse_train = loss.eval(feed_dict={X: x_train, y: y_train})
                mse_valid = loss.eval(feed_dict={X: x_valid, y: y_valid})
                print(iteration, '\tTrain MSE:', mse_train,
                      '\tValidation MSE:', mse_valid)
        # Saving Model for future use
        saver.save(sess, './model/Stock_prediction_model')

    with tf.Session() as sess:
        # Using Saver instance to restore saved rnn
        saver.restore(sess, './model/Stock_prediction_model')
        y_pred = sess.run(final_outputs, feed_dict={X: x_test})

    # Undo the MinMax scaling so prices are back in rupees.
    y_test = sc.inverse_transform(y_test)
    y_pred = sc.inverse_transform(y_pred)

    # Comparing the actual versus predicted price
    # Build a date index ending at the most recent date in the data.
    latest_date = max(pd.to_datetime(html_data['Date']))
    ind = []
    for i in range(test_set_size):
        ind.append(latest_date - timedelta(days=test_set_size - i - 1))

    fig, ax = plt.subplots(figsize=(15, 7))
    plt.plot(
        ind, y_test[:, 0], color='black',
        label='Actual Price')  # Plotting the Open Market Price. Hence index 0
    # 0 = open, 1 = close, 2 = highest, 3 = lowest
    ax.plot(ind, y_pred[:, 0], color='green', label='Predicted Price')
    ax.set_title('Stock Price Prediction')
    ax.set_xlabel('Date')
    ax.set_ylabel('Price in rs')
    # set ticks every week
    #ax.xaxis.set_major_locator(mdates.WeekdayLocator())
    # set major ticks format
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%b %d'))
    ax.xaxis.set_tick_params(rotation=45)
    ax.legend(loc='best')
    plt.savefig('Actual_vs_Predicted_Stock_Price.jpeg', format='jpeg')
    plt.show()

    # Render the figure into an in-memory JPEG and wrap it in an
    # HttpResponse.
    imgdata = BytesIO()
    fig.savefig(imgdata, format='jpeg')
    imgdata.seek(0)  # rewind the data
    im = Image.open(imgdata)
    canvas = FigureCanvas(im)
    response = django.http.HttpResponse(content_type='image/jpeg')
    canvas.print_jpeg(response)
    return response

    # Evaluating the model
    # NOTE(review): unreachable -- this follows the return statement above.
    rmse = sqrt(mean_squared_error(y_pred[:, 0], y_test[:, 0]))
    normalized_rmse = rmse / (max(y_pred[:, 0]) - min(y_pred[:, 0]))
    print('Normalized RMSE: ', normalized_rmse)
from PIL.ImageFilter import BoxBlur from .helpers import env_var_line from .helpers import env_var_time DEVICE = env_var_line("WEBCAM_DEVICE") or "video0" RESOLUTION = env_var_line("WEBCAM_RESOLUTION") or "640x480" IMG_W, ING_H = map(int, RESOLUTION.split("x")) PATH_ACTUAL_IMG = (env_var_line("PATH_ACTUAL_IMG") or "/tmp/last_img.png") BLUR_RAD = IMG_W // 100 IMG_BLACK_LIMIT = 4 NETWORK_CHECK_TIMEOUT = env_var_time("NETWORK_CHECK_TIMEOUT") or 600 fig, ax = plt.subplots() ax.fmt_xdata = mdates.DateFormatter("%Y-%m-%d") ax.grid(True) def get_png_photo( png_factor: int = 9 ) -> typing.Tuple[typing.Optional[Image], typing.List[str]]: """Get image from web camera. apt-get install fswebcam """ img_path = f"/tmp/{uuid.uuid4().hex}.png" result = subprocess.run([ "/usr/bin/fswebcam", "-r", RESOLUTION, "--no-banner",
index=date_serie.tolist()) fig, ax = plt.subplots(figsize=(12, 8)) ax.plot(my_plot_1['original'], lw=3, color="b", alpha=.8, label="Original") ax.fill_between(date_serie.tolist(), my_plot_1['original'], my_guarantee, facecolor='green', where=my_plot_1['original'] >= my_guarantee) ax.fill_between(date_serie.tolist(), my_plot_1['original'], my_guarantee, facecolor='red', where=my_plot_1['original'] < my_guarantee) ax.axhline(my_guarantee, color="black") ax.legend() ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m')) ax.grid(True) ax.set_title('Capital Evolution', fontweight="bold", fontsize=30) plt.ylabel("Results in $", fontsize=20) plt.xlabel('Dates', fontsize=20) st.pyplot(fig) plt.close() with col02: # plot max drawdown percentage # step 1 calculate de drawdown evolution in a dataframe my_dd = max_dd_evol(my_guarantee, my_result) # step 2 generate the dataframe and plot my_plot_3 = pd.DataFrame((my_dd['pct_dd'] * (0 - 100)).tolist(), columns=['drawdown'], index=date_serie.tolist()) my_plot_3['max_drawdown'] = (my_dd['pct_max_dd'] * (0 - 100)).tolist()
dt.strftime(tmpStrtDT, '%Y%m%d'), dt.strftime(tmpStrtDT, '%H%M'), dt.strftime(tmpEndDT, '%Y%m%d'), dt.strftime(tmpEndDT, '%H%M%S')) else: saveDTstr = '{}_{}-{}_{}'.format( dt.strftime(tmpStrtDT, '%Y%m%d'), dt.strftime(tmpStrtDT, '%H%M'), dt.strftime(tmpEndDT, '%Y%m%d'), dt.strftime(tmpEndDT, '%H%M')) print('\tPlotting {}'.format(titleDTstr)) # If our plotting period is longer than 5 minutes, don't show seconds in the xtick labels if tDelta <= datetime.timedelta(minutes=5): xtick_formatter = mdates.DateFormatter(fmt='%H:%M:%S') else: xtick_formatter = mdates.DateFormatter(fmt='%H:%M') # Find start and end indices most closely matching current plotting frame bounds hcr_tMatchStrt = min(hcr_time1d_rnd, key=lambda x: abs(pd.to_datetime(x) - tmpStrtDT)) hcr_tmpStIx = np.squeeze(np.where(hcr_time1d_rnd == hcr_tMatchStrt))[0] hcr_tMatchEnd = min(hcr_time1d_rnd, key=lambda x: abs(pd.to_datetime(x) - tmpEndDT)) hcr_tmpEndIx = np.squeeze( np.where(hcr_time1d_rnd == hcr_tMatchEnd))[-1] hsrl_tMatchStrt = min(hsrl_time1d, key=lambda x: abs(x - tmpStrtDT)) hsrl_tmpStIx = np.squeeze(np.where(hsrl_time1d == hsrl_tMatchStrt)) hsrl_tMatchEnd = min(hsrl_time1d, key=lambda x: abs(x - tmpEndDT))
def plot_stats(statfile):
    """Plot one night's telescope statistics from a CSV log file.

    Parameters
    ----------
    statfile : str
        Path to a comma-separated log whose columns (f2..f11) carry JD,
        source count, FWHM, ellipticity, background, airmass, temperatures
        and image type.

    Saves a six-panel PNG next to the log, named after the night's date.
    """
    colors = {"ACQUISITION": "b", "SCIENCE": "r", "FOCUS": "g", "GUIDER": "k"}

    s = np.genfromtxt(statfile, delimiter=",", dtype=None)
    s.sort(order="f2")  # chronological (JD) order
    s = s[s["f3"] > 1]  # keep rows with more than one extracted source
    # NOTE(review): with dtype=None under Python 3, string columns load as
    # bytes, which would break colors[im] below -- confirm NumPy decodes to
    # str here (or pass encoding="utf-8").

    # Local-vs-UTC offset, rounded up to whole seconds, as a day fraction.
    day_frac_diff = datetime.timedelta(
        np.ceil((datetime.datetime.now() -
                 datetime.datetime.utcnow()).total_seconds()) / 3600 / 24)
    datestat = np.array([time_utils.jd2utc(jd) for jd in s["f2"]])
    datestat = datestat + day_frac_diff

    # We add 5h to the UTC date, so it always keeps the date of the end of
    # the night.
    day = ("%s" % (datestat[-1] + datetime.timedelta(5. / 24))).split()[0]

    xfmt = md.DateFormatter('%H:%M')

    f, ((ax1, ax2), (ax3, ax4), (ax5, ax6)) = plt.subplots(3, 2)
    plt.suptitle("Statistics %s" % day)
    f.set_figwidth(16)
    f.set_figheight(12)

    ax1.plot(datestat, s["f3"], ".-")
    ax1.set_title('Number of bright sources extracted')

    # FWHM, one colored scatter series per image type.
    for im in set(s["f9"]):
        mask = s["f9"] == im
        ax2.plot(datestat[mask], s["f4"][mask], ".", color=colors[im],
                 label=im)
    ax2.set_title('FWHM [arcsec]')

    ax3.plot(datestat, s["f6"], ".-")
    ax3.set_title('Background')

    ax4.plot(datestat, s["f7"], ".-")
    ax4.set_title('Airmass')

    ax5.plot(datestat, s["f8"], ".-", label="Inside")
    ax5.plot(datestat, s["f10"], ".-", label="Outside")
    ax5.set_title('Temperature')

    ax6.plot(datestat, s["f5"], ".-")
    ax6.set_title('Ellipticity')

    # Shared time formatter and rotated tick labels on every panel
    # (replaces six copies of the same two statements).
    for axis in (ax1, ax2, ax3, ax4, ax5, ax6):
        axis.xaxis.set_major_formatter(xfmt)
        plt.setp(axis.get_xticklabels(), rotation=30, fontsize=10)

    ax2.legend(labelspacing=0.3,
               loc="upper right",
               fontsize=11,
               numpoints=1,
               frameon=False,
               ncol=1,
               fancybox=False,
               shadow=True,
               bbox_to_anchor=(1., 1.))
    ax5.legend(labelspacing=0.3,
               loc="upper left",
               fontsize=11,
               numpoints=1,
               frameon=False,
               ncol=1,
               fancybox=False,
               shadow=True,
               bbox_to_anchor=(0., 1.))

    # BUG FIX: savefig's keyword is bbox_inches, not bbox; the original
    # argument was silently ignored and the figure saved untrimmed.
    plt.savefig(statfile.replace(".log", "%s.png" % (day)),
                bbox_inches="tight")
def source_solar_angle(catalogue, ref_antenna): """Source solar angle. The solar separation angle (in degrees) from the target observation region as seen by the ref_ant Parameters ---------- catalogue: list or file Data on the target objects to be observed ref_antenna: katpoint.Antenna A MeerKAT reference antenna Returns -------- solar separation angle for a target wrst ref_ant at a given time """ date = ref_antenna.observer.date horizon = numpy.degrees(ref_antenna.observer.horizon) date = date.datetime().replace(hour=0, minute=0, second=0, microsecond=0) numdays = 365 date_list = [date - timedelta(days=x) for x in range(0, numdays)] sun = katpoint.Target("Sun, special") target_tags = get_filter_tags(catalogue, targets=True) katpt_targets = catalogue.filter(target_tags) for cnt, katpt_target in enumerate(katpt_targets): plt.figure(figsize=(17, 7), facecolor="white") ax = plt.subplot(111) plt.subplots_adjust(right=0.8) fontP = FontProperties() fontP.set_size("small") solar_angle = [] for the_date in date_list: ref_antenna.observer.date = the_date sun.body.compute(ref_antenna.observer) katpt_target.body.compute(ref_antenna.observer) solar_angle.append( numpy.degrees(ephem.separation(sun.body, katpt_target.body))) myplot, = plt.plot_date(date_list, solar_angle, fmt=".", linewidth=0, label="{}".format(katpt_target.name)) ax.axhspan(0.0, horizon, facecolor="k", alpha=0.2) box = ax.get_position() ax.set_position([box.x0, box.y0, box.width * 0.95, box.height]) plt.grid() plt.legend(loc="center left", bbox_to_anchor=(1, 0.5), prop={"size": 10}, numpoints=1) plt.ylabel("Solar Separation Angle (degrees)") ax.set_xticklabels(date_list[0::20], rotation=30, fontsize=10) ax.xaxis.set_major_formatter(mdates.DateFormatter("%b %d")) ax.xaxis.set_major_locator( mdates.DayLocator(bymonthday=range(30), interval=10)) ax.set_xlabel("Date")
def plot_time(output, lines_list, elapsed_flag, start_index, num_steps): fig = plt.figure() title = "Time Series Plot" fig.canvas.set_window_title(title) plt.title(title) left_y_plot = fig.add_subplot(111) right_y_plot = None lines_plotted = [] line_legends = [] x_values = [] for time_index in range(start_index, num_steps): elapsed_hours = output.elapsed_hours_at_index(time_index) if elapsed_flag: x_values.append(elapsed_hours) else: x_values.append(output.StartDate + datetime.timedelta(hours=elapsed_hours)) left_y_plot.xaxis.set_major_formatter( dates.DateFormatter('%Y-%m-%d %H:%M')) for line in lines_list: type_label, object_name, attribute_name, axis, legend_text = line.split( ',', 4) item = output.get_items(type_label)[object_name] if item: attribute = item.get_attribute_by_name(attribute_name) y_values = item.get_series(output, attribute, start_index, num_steps) if y_values: if axis == "Left": plot_on = left_y_plot else: if not right_y_plot: right_y_plot = fig.add_subplot(111, sharex=left_y_plot, frameon=False) right_y_plot.yaxis.set_label_position("right") right_y_plot.yaxis.tick_right( ) # Only show right-axis tics on right axis left_y_plot.yaxis.tick_left( ) # Only show left-axis tics on left axis plot_on = right_y_plot color = colorsys.hsv_to_rgb(np.random.rand(), 1, 1) legend_text = legend_text.strip('"') new_line = plot_on.plot(x_values, y_values, label=legend_text, c=color)[0] lines_plotted.append(new_line) line_legends.append(legend_text) old_label = plot_on.get_ylabel() units = attribute.units(output.unit_system) if not old_label: plot_on.set_ylabel(units) elif units not in old_label: plot_on.set_ylabel(old_label + ', ' + units) # fig.suptitle("Time Series Plot") # plt.ylabel(parameter_label) if elapsed_flag: plt.xlabel("Time (hours)") else: plt.xlabel("Time") fig.autofmt_xdate() if not right_y_plot: plt.grid( True ) # Only show background grid if there is only a left Y axis plt.legend(lines_plotted, line_legends, loc="best") plt.show(block=False)