def plot(self, *args, **kwargs):
    tss = self.eval()
    if 'ax' in kwargs:
        ax = kwargs['ax']
    else:
        ax = gca()
    if 'R' in self.axis:
        kwargsr = kwargs.copy()
        kwargsr['ax'] = ax.twinx()
    for ts, h, a in zip(tss, self.hidden, self.axis):
        if not h and not isinstance(ts, str):
            if a != 'R':
                kw = kwargs.copy()
                # ts.plot(*args, **kwargs)
            else:
                kw = kwargsr.copy()
            # reserve a legend entry on the main axis and reuse its color
            l = ax.plot([], label=ts.name)
            kw['color'] = l[0].get_color()
            if a == 'V':
                # collect the dates where the series switches between zero and non-zero
                if ts[0] != 0:
                    ds = [ts.index[0]]
                else:
                    ds = []
                for d, x, lx in list(zip(ts.index, ts, ts.shift(1)))[1:]:
                    if (x == 0) != (lx == 0):
                        ds.append(d)
                if len(ds) % 2 != 0:
                    # close an open span at the end of the series
                    ds.append(ts.index[-1])
                it = iter(ds)
                for pair in zip(it, it):
                    x1 = date2num(pair[0])
                    x2 = date2num(pair[1])
                    ax.axvspan(x1, x2, facecolor='b', alpha=0.16)
            else:
                kw['label'] = ts.name
                ts.dropna().plot(*args, **kw)
def autoscale(self):
    """
    Set the view limits to include the data range.
    """
    dmin, dmax = self.datalim_to_dt()
    if dmin > dmax:
        dmax, dmin = dmin, dmax

    delta = relativedelta(dmax, dmin)

    # We need to cap at the endpoints of valid datetime
    try:
        start = dmin - delta
    except ValueError:
        start = _from_ordinal(1.0)

    try:
        stop = dmax + delta
    except ValueError:
        # The magic number!
        stop = _from_ordinal(3652059.9999999)

    dmin, dmax = self.datalim_to_dt()

    vmin = dates.date2num(dmin)
    vmax = dates.date2num(dmax)

    return self.nonsingular(vmin, vmax)
def plot_date_bars(bin_data, bin_edges, title, ylabel, fname):
    """
    Semi-generic function to plot a bar graph, with the x-label fixed to
    "date" and the x-ticks formatted accordingly.

    To plot a histogram, the histogram data must be calculated outside this
    function, either manually or using :py:func:`numpy.histogram`.

    :param bin_data: list of data for each bin
    :param bin_edges: list of bin edges (:py:class:`datetime.date` objects),
                      its length must be ``len(data)+1``
    :param title: title of the plot
    :param ylabel: label of y-axis
    :param fname: output file name
    """
    import matplotlib.pyplot as plt
    from matplotlib.dates import date2num, num2date
    from matplotlib import ticker

    plt.figure()  # clear previous figure
    plt.title(title)
    plt.xlabel("date")
    plt.ylabel(ylabel)

    # plot the bars; the width of the bins is assumed to be fixed
    plt.bar(date2num(bin_edges[:-1]), bin_data,
            width=date2num(bin_edges[1]) - date2num(bin_edges[0]))

    # x-ticks formatting
    plt.gca().xaxis.set_major_formatter(ticker.FuncFormatter(
        lambda numdate, _: num2date(numdate).strftime('%Y-%m-%d')))
    plt.gcf().autofmt_xdate()
    plt.tick_params(axis="x", which="both", direction="out")
    plt.xticks([date2num(ts) for ts in bin_edges if ts.month % 12 == 1])

    plt.savefig(fname, papertype="a4")
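# Hedged usage sketch for plot_date_bars above; the sample dates, bin edges
# and output filename are made up for illustration. numpy.histogram needs
# numeric input, so the dates are converted with date2num before binning,
# while the edges are passed to the function as datetime.date objects as its
# docstring requires.
import datetime
import numpy as np
from matplotlib.dates import date2num

sample_dates = [datetime.date(2020, m, 15) for m in range(1, 13)]
edges = ([datetime.date(2020, m, 1) for m in range(1, 13)]
         + [datetime.date(2021, 1, 1)])
counts, _ = np.histogram(date2num(sample_dates), bins=date2num(edges))
plot_date_bars(counts, edges, "events per month", "count", "per_month.png")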
def calc_bar_colors(measure, end_date, is_planning):
    """Returns the calculated bars. The bars are aggregated from the
    measure_status_moments of sub-measures.

    ** measure can also be a measure_collection. It uses the status_moment
    function only.
    """
    measure_bar = []
    measure_colors = []
    measure_status_moments = measure.measure_status_moments(
        end_date=end_date, is_planning=is_planning)
    for msm_index, msm in enumerate(measure_status_moments):
        # drawing enddate: "infinity" or next status moment
        if msm_index == len(measure_status_moments) - 1:
            msm_end_date = end_date
        else:
            if is_planning:
                msm_end_date = measure_status_moments[
                    msm_index + 1].planning_date
            else:
                msm_end_date = measure_status_moments[
                    msm_index + 1].realisation_date
        if is_planning:
            begin = msm.planning_date
        else:
            begin = msm.realisation_date
        date_length = date2num(msm_end_date) - date2num(begin)

        measure_bar.append((date2num(begin), date_length))
        measure_colors.append(msm.status.color.html)
    return measure_bar, measure_colors
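# Hedged sketch of rendering the output of calc_bar_colors above. The measure
# object is assumed to come from the surrounding project; the (start, length)
# tuples map directly onto Axes.broken_barh, which takes a list of
# (xmin, xwidth) pairs plus a single (ymin, yheight) pair, and the helper
# name draw_measure_bar is illustrative.
import matplotlib.pyplot as plt

def draw_measure_bar(ax, measure, end_date, is_planning, y=0, height=0.8):
    bars, colors = calc_bar_colors(measure, end_date, is_planning)
    ax.broken_barh(bars, (y, height), facecolors=colors)
    ax.xaxis_date()  # interpret the date2num floats as calendar dates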
def plotAotiHour():
    fig = plt.figure(figsize=(18, 5))
    rect = fig.patch
    rect.set_facecolor("white")

    df = pd.read_csv("urban-country/aoti_pm25.csv", sep=",", header=0)
    df = df[df["date"] >= 20140514]
    df = df[df["date"] <= 20140527]
    df["date"] = df["date"].astype(str)
    df["hour"] = df["hour"].astype(str)
    dateAndTime = pd.to_datetime(df["date"] + df["hour"], format="%Y%m%d%H")

    aoti = df["奥体中心"].tolist()
    ts_aoti = Series(aoti, index=dateAndTime)
    plot = ts_aoti.plot(linestyle="-", color="black", marker="8",
                        markersize=4, label=u"奥体中心")

    time = dt.datetime(2014, 5, 17, 10)
    df = df[df["date"] == "20140517"]
    df = df[df["hour"] == "10"]
    value = df.iloc[0, 3]
    print(mdates.date2num(time), value)
    plt.annotate(
        u"aoti24",
        xy=(mdates.date2num(time), value),
        xytext=(30, 20),
        textcoords="offset points",
        arrowprops=dict(arrowstyle="-|>"),
    )
    plt.show()
def plot(startday, filename, col1, col2, col3):
    now = datetime.now()
    now = time.mktime(now.timetuple())
    df = pd.read_csv(filename, delimiter='\t', index_col=0)
    df = df.dropna(axis=0)

    plt.subplot(211)
    if startday < 0:
        startindex = 0
    else:
        startindex = df['date'].searchsorted(now - startday * 24 * 3600)[0]
    print(startindex)
    df['date'] = df['date'].apply(
        lambda x: mdates.date2num(convert_to_localtime(x)))
    # plt.bar(df.iloc[::][col1], df.iloc[::][col2], color='black')
    plt.hist(df.iloc[startindex:][col2], [-1, 0, 1, 2], color='black')

    plt.subplot(212)
    # ax.set_ylim(0,100000)
    # ax2 = plt.subplot(212, sharex=ax)
    plt.scatter(df.iloc[::][col1], df.iloc[::][col3], color='blue')
    xrange1 = mdates.date2num(convert_to_localtime(now - startday * 24 * 3600))
    xrange2 = mdates.date2num(convert_to_localtime(now))
    # plt.xlim(xrange1, xrange2)
    # plt.ylim(7500,12000)
    # ax.axis.set_major_formatter(mdates.DateFormatter('%m-%d %H:%M'))
    plt.grid(True)
    # for label in ax2.xaxis.get_ticklabels():
    #     label.set_rotation(45)
    plt.subplots_adjust(left=0.09, bottom=0.2, right=0.94, top=0.90,
                        wspace=0.12, hspace=0)
    plt.show()
def plot_chart(*tfs, norm=True, realtime=True, markers=None):
    import matplotlib.dates as mdt
    fig, ax = plt.subplots()
    if len(tfs) == 1 and norm is True:
        y = [(mdt.date2num(candle.DateTime), candle.Open, candle.Close,
              candle.High, candle.Low) for candle in tfs[0].container]
        candlestick(ax, y, width=0.4, colorup='r', colordown='b')
        ax.xaxis_date()
        plt.title(tfs[0].symbol + " chart")
        if markers is not None:
            marker_y, marker_x = [], []
            for elem in tfs[0].container:
                if elem.DateTime in markers:
                    marker_y.append(.5 * (elem.Open + elem.Close))
                    marker_x.append(mdt.date2num(elem.DateTime))
            plt.plot(marker_x, marker_y, 'gD')
    else:
        for tf in tfs:
            if realtime is True:
                x = tf.get_Time_list()
            else:
                x = range(len(tf))
            y = np.array(tf.get_C_list())
            if norm is True:
                # normalise each series to the [0, 1] range
                y -= y.min()
                y /= y.max()
            plt.plot(x, y, label=tf.symbol)
        plt.title("Charts graph")
        plt.legend(loc='upper center', shadow=True)
    plt.xlabel("Time")
    plt.ylabel("Price")
    plt.grid()
    plt.show()
def __init__(self, fig):
    times, L, MLT, MLAT, InvLat, density = get_density_and_time(
        'rbspa', datetime(2012, 10, 6), datetime(2012, 10, 10))
    otimes = date2num(times)
    timesb, Lb, MLTb, MLATb, InvLatb, densityb = get_density_and_time(
        'rbspb', datetime(2012, 10, 8), datetime(2012, 10, 9))
    otimesb = date2num(timesb)
    self.L = L
    self.times = times
    self.otimes = otimes
    self.density = density
    self.emfisis_fit = emfisis_fit_model('rbspa')
    fitdensity, fituncert, inds = self.emfisis_fit(
        times, L, MLT, MLAT, InvLat, returnFull=True)
    self.fitdensity = fitdensity
    self.fig = fig
    self.ax1 = plt.subplot(2, 1, 1)
    self.ax2 = plt.subplot(2, 1, 2)
    self.ax2.plot(L, density, linestyle='', marker='.')
    self.Lline = None
    self.Llim = np.array([1.5, 2])
    self.pressed = False
    self.uinds = np.unique(inds)
    self.uinds = self.uinds[self.uinds < self.emfisis_fit.fitcoeffs.shape[0]]
    self.fig.canvas.mpl_connect('motion_notify_event', self.select_L)
    self.fig.canvas.mpl_connect('button_press_event', self.select_Lstart)
    self.fig.canvas.mpl_connect('button_release_event', self.on_release)
def handle(self, *args, **options):
    # Get user join dates
    User = get_user_model()
    datetimes = User.objects.values_list('date_joined', flat=True) \
        .order_by('date_joined')
    dates = [d.date() for d in datetimes]

    # Get some auxiliary values
    min_date = date2num(dates[0])
    max_date = date2num(dates[-1])
    days = int(max_date - min_date) + 1

    # Initialize X and Y axes
    x = np.arange(min_date, max_date + 1)
    y = np.zeros(days)

    # Iterate over dates, increase registration array
    for date in dates:
        index = int(date2num(date) - min_date)
        y[index] += 1
    y_sum = np.cumsum(y)

    # Plot
    plt.plot_date(x, y_sum, xdate=True, ydate=False, ls='-', ms=0,
                  color='#16171E')
    plt.fill_between(x, 0, y_sum, facecolor='#D0F3FF')
    plt.title('Studentenportal: Registrierte Benutzer')
    plt.rc('font', size=8)
    if options['save']:
        plt.savefig(options['save'])
    else:
        plt.show()
def on_loadQuoteClicked(self):
    logger.info('load quote')
    fileName = QtGui.QFileDialog.getOpenFileName(
        self, self.tr("Open Quote Data"), data_path,
        self.tr("Quote Files (*.csv)"))
    logger.info("Filename %s" % fileName)

    if os.path.isfile(fileName):
        df = pd.read_csv(unicode(fileName))
        df.columns = [col.lower() for col in df.columns]
        if 'datetime' in df.columns:
            df = df.sort_values(['datetime'])
            df['datetime'] = df.apply(
                lambda row: mdates.date2num(parser.parse(row['datetime'])),
                axis=1)
        elif 'date' in df.columns:
            df = df.sort_values(['date'])
            df['datetime'] = df.apply(
                lambda row: mdates.date2num(parser.parse(row['date'])),
                axis=1)

        if 'datetime' in df.columns and not df['datetime'].empty:
            self.ui_controller.matplotlibWidget.set_data(df)
            self.ui_controller.matplotlibWidget.draw_data()
            self.df = df
def parse_file_to_dict(data_dict, samp_int_dict, file, counter, format=None,
                       verbose=False, ignore_links=False):
    from matplotlib.dates import date2num
    if ignore_links and os.path.islink(file):
        print("Ignoring symlink: %s" % (file))
        return counter
    try:
        stream = read(file, format=format, headonly=True)
    except Exception:
        print("Can not read %s" % (file))
        return counter
    s = "%s %s" % (counter, file)
    if verbose:
        sys.stdout.write("%s\n" % s)
        for line in str(stream).split("\n"):
            sys.stdout.write(" " + line + "\n")
    else:
        sys.stdout.write("\r" + s)
        sys.stdout.flush()
    for tr in stream:
        _id = tr.getId()
        data_dict.setdefault(_id, [])
        data_dict[_id].append([date2num(tr.stats.starttime),
                               date2num(tr.stats.endtime)])
        try:
            samp_int_dict.\
                setdefault(_id, 1. / (24 * 3600 * tr.stats.sampling_rate))
        except ZeroDivisionError:
            print("Skipping file with zero sampling rate: %s" % (file))
            return counter
    return (counter + 1)
def plot_temp():
    data = read_temps()
    dates, values = map(np.array, zip(*[(d['date'], d['temperature'])
                                        for d in data]))
    tmp = (date2num(dates) % 1.0) * 24.0
    ii = np.where((tmp > 0) & (tmp < 8))[0]
    continuum = get_continuum(dates, dates[ii], values[ii])

    setup(figsize=(12, 6))
    setupplot(subplt=(1, 2, 1), autoticks=True, xlabel='Date',)
    pylab.plot(dates, values)
    pylab.plot(dates[ii], values[ii], '.r')
    pylab.plot(dates, continuum, '.k')
    plot_weather(np.min(date2num(dates)))
    # pylab.plot(dates, values-continuum+38, '.r')
    dateticks('%Y.%m.%d')

    setupplot(subplt=(2, 2, 2), autoticks=False, xlabel='Hour of Day')
    pylab.plot(tmp, values, '.')
    setupplot(subplt=(2, 2, 2), ylabel='', secondax=True)

    setupplot(subplt=(2, 2, 4), autoticks=False, xlabel='Hour of Day')
    sc = pylab.scatter(tmp, values - continuum + TNORM,
                       c=date2num(dates) - np.min(date2num(dates)),
                       s=15, marker='.', edgecolor='none',
                       label='Days since Start')
    setupplot(subplt=(2, 2, 4), ylabel='', secondax=True)
    hcolorbar(sc, axes=[0.75, 0.42, 0.1, 0.01])

    pylab.tight_layout()
    pylab.show()
def generate_count(counts):
    import matplotlib
    from matplotlib.pyplot import figure, show
    from matplotlib.dates import DateFormatter

    print(len(counts))
    fig = figure()
    matplotlib.rcParams['font.size'] = 8.0
    ax = fig.add_subplot(111)
    ax.plot_date(list(counts.keys()), list(counts.values()), markersize=2)
    ax.set_title('Nb of actions per second')
    ax.set_ylabel('Nb of actions')
    ax.fmt_xdata = DateFormatter('%H:%M:%S')
    timestamp = list(counts.keys())[0]
    start = datetime.datetime(timestamp.year, timestamp.month,
                              timestamp.day, 7, 0, 0)
    end = datetime.datetime(timestamp.year, timestamp.month,
                            timestamp.day, 16, 1, 0)
    ax.set_xlim(date2num(start), date2num(end))
    fig.autofmt_xdate()
    fig.subplots_adjust(left=.05, bottom=.2, right=.95, top=.9)
    filename = 'action_count.png'
    fig.set_size_inches(10., 3.)
    fig.savefig(filename)
    return filename
def Hierro_ecvd(data_file):
    # Import catalogue in IGN fixed-width text file format
    columns = [slice(5, 12), slice(17, 27), slice(34, 42), slice(50, 57),
               slice(64, 75), slice(85, 87), slice(114, 117)]
    output = []
    myfile = open(data_file)
    for line in myfile:
        fields = [line[column].strip() for column in columns]
        output.append(fields)
    Cat1 = np.array(output)
    Cat1[Cat1 == ''] = 999
    y = Cat1[1:, 3:].astype(float)
    x = Cat1[1:, 1:3]
    dates = []
    times = []
    for line in x:
        datevalues = mdates.date2num(
            dt.datetime.strptime(line[0], "%d/%m/%Y"))
        # strptime with only "%H:%M:%S" defaults to 1900-01-01, so
        # subtracting the datenum of that date leaves just the day fraction
        timevalues = (mdates.date2num(dt.datetime.strptime(line[1], "%H:%M:%S"))
                      - mdates.date2num(dt.date(1900, 1, 1)))
        dates.append(datevalues)
        times.append(timevalues)
    datetimes = np.add(dates, times)
    datetimes = datetimes.reshape(len(dates), 1)
    Cat1 = np.concatenate((datetimes, y), axis=1)
    Cat1 = Cat1[Cat1[:, 0].argsort()]
    return Cat1
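# Small sanity-check sketch for the date/time combination used in Hierro_ecvd
# above: because differences of date2num values are epoch-independent,
# subtracting date2num(1900-01-01) from a time-only datetime (which strptime
# places on 1900-01-01) yields exactly the fractional day.
import datetime as dt
import matplotlib.dates as mdates

t = mdates.date2num(dt.datetime.strptime("06:00:00", "%H:%M:%S"))
frac = t - mdates.date2num(dt.date(1900, 1, 1))
assert abs(frac - 0.25) < 1e-9  # 06:00 is a quarter of a day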
def plot_trend_graph_all_tests(self, save_path='', file_name='_trend_graph.png'):
    time_format1 = '%d-%m-%Y-%H:%M'
    time_format2 = '%Y-%m-%d-%H:%M'

    for test in self.tests:
        test_data = test.results_df[test.results_df.columns[2]].tolist()
        test_time_stamps = test.results_df[test.results_df.columns[3]].tolist()
        start_date = test_time_stamps[0]
        test_time_stamps.append(self.end_date + '-23:59')
        test_data.append(test_data[-1])
        float_test_time_stamps = []
        for ts in test_time_stamps:
            try:
                float_test_time_stamps.append(
                    matdates.date2num(datetime.strptime(ts, time_format1)))
            except ValueError:
                # fall back to the second timestamp format
                float_test_time_stamps.append(
                    matdates.date2num(datetime.strptime(ts, time_format2)))
        plt.plot_date(x=float_test_time_stamps, y=test_data,
                      label=test.name, fmt='.-', xdate=True)
        plt.legend(fontsize='small', loc='best')

    plt.ylabel('MPPS/Core (Norm)')
    plt.title('Setup: ' + self.name)
    plt.tick_params(
        axis='x',
        which='both',
        bottom='off',
        top='off',
        labelbottom='off')
    plt.xlabel('Time Period: ' + start_date[:-6] + ' - ' + self.end_date)

    if save_path:
        plt.savefig(os.path.join(save_path, self.name + file_name))
        if not self.setup_trend_stats.empty:
            (self.setup_trend_stats.round(2)).to_csv(
                os.path.join(save_path, self.name + '_trend_stats.csv'))

    plt.close('all')
def get_local_historical_data_from_db(self, _symbol, _convert, _is_intraday):
    data = None
    if os.path.isfile(FILE_GENERATOR.format(DB_PATH, _symbol)):
        saved_data = open(FILE_GENERATOR.format(DB_PATH, _symbol), "r")
        data = saved_data.read().split("\n")

        date_data = []
        open_data = []
        high_data = []
        low_data = []
        close_data = []
        volume_data = []
        for line in data:
            if not line:  # skip the trailing empty line left by split("\n")
                continue
            splitted_line = line.split(",")
            datetime_to_float = splitted_line[0]
            if _convert:
                if _is_intraday:
                    datetime_to_float = mdates.date2num(
                        datetime.datetime.strptime(splitted_line[0],
                                                   "%Y-%m-%d %H:%M:%S")
                    )
                else:
                    datetime_to_float = mdates.date2num(
                        datetime.datetime.strptime(
                            splitted_line[0], "%Y-%m-%d").replace(
                                hour=16, minute=0, second=0)
                    )
            date_data.append(datetime_to_float)
            open_to_float = float(splitted_line[1])
            open_data.append(open_to_float)
            high_to_float = float(splitted_line[2])
            high_data.append(high_to_float)
            low_to_float = float(splitted_line[3])
            low_data.append(low_to_float)
            close_to_float = float(splitted_line[4])
            close_data.append(close_to_float)
            volume_to_float = float(splitted_line[5])
            volume_data.append(volume_to_float)

        df = pd.DataFrame(np.array(date_data), columns=["Date"])
        if _is_intraday:
            df["Date"] = range(0, len(np.array(open_data)))
        df["Open"] = np.array(open_data)
        df["High"] = np.array(high_data)
        df["Low"] = np.array(low_data)
        df["Close"] = np.array(close_data)
        df["Volume"] = np.array(volume_data)
        # df.index = date_data

        if df.isnull().values.any():
            print("Erroneous data found in the db! It must be fixed manually!")
        return df
    else:
        print("{}: No db file for {}!".format(datetime.datetime.now(), _symbol))
def output_chart(chart, option):
    fig, ax = chart[:2]
    # ax.autoscale_view()
    xmin = date2num(datetime.fromtimestamp(option['view_start']))
    xmax = date2num(datetime.fromtimestamp(option['view_end']))
    ax.get_xaxis().set_view_interval(xmin, xmax, ignore=True)
    xmin, xmax = ax.get_xaxis().get_view_interval()
    ymin, ymax = ax.get_yaxis().get_view_interval()

    args = dict(color='black', linewidth=2)
    ax.add_artist(Line2D((xmin, xmax), (ymin, ymin), **args))
    ax.add_artist(Line2D((xmax, xmax), (ymin, ymax), **args))
    args = dict(color='black', linewidth=1)
    ax.add_artist(Line2D((xmin, xmax), (ymax, ymax), **args))
    ax.add_artist(Line2D((xmin, xmin), (ymin, ymax), **args))

    from matplotlib.dates import MinuteLocator
    locator = MinuteLocator(
        interval=int((option['view_end'] - option['view_start']) / 60 / 14))
    ax.get_xaxis().set_major_locator(locator)
    locator = MinuteLocator(
        interval=int((option['view_end'] - option['view_start']) / 60 / 14 / 2))
    ax.get_xaxis().set_minor_locator(locator)
    ax.get_xaxis().grid(which='both', color='#999999', linestyle=':', lw=0.3)
    ax.get_yaxis().grid(which='both', color='#999999', linestyle=':', lw=0.3)

    output = StringIO()
    plt.savefig(output, format='png', dpi=300)
    return output.getvalue()
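# Worked example of the locator-interval arithmetic in output_chart above:
# for a hypothetical 2-hour view window, the major interval is the span in
# minutes divided by 14, which yields roughly fourteen major ticks across
# the view, with minor ticks at half that spacing.
view_seconds = 2 * 3600                  # hypothetical view_end - view_start
major = int(view_seconds / 60 / 14)      # -> 8 minutes between major ticks
minor = int(view_seconds / 60 / 14 / 2)  # -> 4 minutes between minor ticks
print(major, minor)                      # 8 4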
def plotSEFD(infile, label):
    fin = open(infile, "r")
    pyplot.clf()
    pyplot.ion()
    fig = pyplot.subplot(1, 1, 1)
    for line in fin:
        if not line.startswith('#'):
            a = line.split()
            color = 'black'
            if a[2] == "SGRA":
                color = 'red'
            if a[2] == "M87":
                color = 'blue'
            t = datetime.datetime.strptime(a[3], "%H:%M:%S")
            tt = mpdates.date2num(t)
            Cr = float(a[7])
            Cp = float(a[8])
            Cplow = float(a[9])
            Cphigh = float(a[10])
            fig.plot_date(tt, Cp, marker="o", color=color, linewidth=3)
            fig.plot_date(tt, Cr, marker="s", fillstyle='full', alpha=0.5,
                          color=color)
            fig.plot_date([tt, tt], [Cplow, Cphigh], "-", linewidth=2,
                          color=color)
    fin.close()
    fig.xaxis.set_major_locator(mpdates.HourLocator())
    fig.xaxis.set_major_formatter(mpdates.DateFormatter("%H"))
    fig.set_yscale("log")
    tmin = mpdates.date2num(datetime.datetime.strptime("00:00", "%H:%M"))
    tmax = mpdates.date2num(datetime.datetime.strptime("19:00", "%H:%M"))
    fig.set_xlim(tmin - .02, tmax)
    fig.set_ylim(1.e3, 1.e6)
    fig.text(.5, .94, label, verticalalignment="center",
             transform=fig.transAxes, horizontalalignment="center",
             fontsize=18)
    fig.grid()
    pyplot.xlabel("UT (hrs)")
    pyplot.ylabel("SEFD (Jy)")
    pyplot.show()
def runExample2():
    import matplotlib.dates as dates

    dbfile = 'C:/Projects/GOMGalveston/DATA/GalvestonObs.db'
    outvar = ['NetCDF_Filename', 'NetCDF_GroupID', 'StationName']
    tablename = 'observations'
    varname = 'waterlevel'
    condition = 'Variable_Name = "%s"' % varname
    data, query = queryNC(dbfile, outvar, tablename, condition)

    # Plot the results in one figure
    datemin = datetime(2011, 6, 1)
    datemax = datetime(2011, 7, 1)
    ylim = [27, 35]  # temp
    ylim = [0, 100]

    fig = plt.figure(figsize=(8, 12))
    k = 0
    for dd in data:
        # convert the time for plotting
        t = dates.date2num(dd['time'])
        k += 1
        ax = plt.subplot(len(data), 1, k)
        plt.plot(t, dd[varname], 'b')
        plt.title('%s at %s' % (varname, query['StationName'][k - 1]))
        # Format the x-ticks
        ax.set_xlim(dates.date2num(datemin), dates.date2num(datemax))
        ax.set_ylim(ylim[0], ylim[1])
        ax.grid(True)
        ax.xaxis.set_major_formatter(dates.DateFormatter('%d%b%Y'))

    fig.autofmt_xdate()
    plt.show()
def plot_group(self, group, idx, axis):
    # create x-coordinates for all log lines in this group
    x_start = date2num([logline.datetime for logline in self.groups[group]])
    x_end = date2num([logline.end_datetime for logline in self.groups[group]])

    color, marker = self.color_map(group)

    # duration plots require y coordinate and use plot_date
    y = [getattr(logline, 'duration') for logline in self.groups[group]]

    if self.logscale:
        axis.semilogy()

    # artist = axis.plot_date(x, y, color=color, markeredgecolor='k',
    #     marker=marker, alpha=0.7, markersize=7, picker=5, label=group)[0]

    artists = []
    labels = set()
    for i, (xs, xe, ye) in enumerate(zip(x_start, x_end, y)):
        artist = axis.plot_date([xs, xe], [0, ye], '-', color=color,
                                alpha=0.7, linewidth=2, markersize=7,
                                picker=5,
                                label=None if group in labels else group)[0]
        labels.add(group)
        # add meta-data for picking
        artist._mt_plot_type = self
        artist._mt_group = group
        artist._mt_line_id = i
        artists.append(artist)

    return artists
def _create_interpolators(self, dates):
    """
    Load the data from the RBSP spacecraft and create interpolator
    objects from it.
    """
    (times, Lstar, MLT, MLAT, InvLat, density,
     segmentbounds) = self._getdata(self.scname, dates)
    interpolators = np.zeros((len(segmentbounds) - 1,), dtype=object)
    MLT_interpolators = np.zeros((len(segmentbounds) - 1,), dtype=object)
    MLAT_interpolators = np.zeros((len(segmentbounds) - 1,), dtype=object)
    InvLat_interpolators = np.zeros((len(segmentbounds) - 1,), dtype=object)
    segmentlimits = np.zeros((len(segmentbounds) - 1, 2))
    for i in range(len(segmentbounds) - 1):
        Lseg = Lstar[segmentbounds[i]:segmentbounds[i + 1]]
        dseg = density[segmentbounds[i]:segmentbounds[i + 1]]
        MLTseg = MLT[segmentbounds[i]:segmentbounds[i + 1]]
        MLATseg = MLAT[segmentbounds[i]:segmentbounds[i + 1]]
        InvLatseg = InvLat[segmentbounds[i]:segmentbounds[i + 1]]
        tseg = times[segmentbounds[i]:segmentbounds[i + 1]]
        inds = np.argsort(Lseg)
        interpolators[i] = UnivariateSpline(Lseg[inds], dseg[inds],
                                            **self.kwargs)
        if self.MLTDependence:
            MLT_interpolators[i] = UnivariateSpline(Lseg[inds], MLTseg[inds],
                                                    **self.kwargs)
        if self.latitudeDependence:
            MLAT_interpolators[i] = UnivariateSpline(Lseg[inds],
                                                     MLATseg[inds],
                                                     **self.kwargs)
            InvLat_interpolators[i] = UnivariateSpline(Lseg[inds],
                                                       InvLatseg[inds],
                                                       **self.kwargs)
        segmentlimits[i, :] = date2num(tseg[0]), date2num(tseg[-1])
    return (segmentlimits, interpolators, MLT_interpolators,
            MLAT_interpolators, InvLat_interpolators)
def __call__(self):
    # if no data have been set, this will tank with a ValueError
    _check_implicitly_registered()
    try:
        dmin, dmax = self.viewlim_to_dt()
    except ValueError:
        return []

    if dmin > dmax:
        dmax, dmin = dmin, dmax
    # We need to cap at the endpoints of valid datetime

    # TODO(wesm) unused?
    # delta = relativedelta(dmax, dmin)
    # try:
    #     start = dmin - delta
    # except ValueError:
    #     start = _from_ordinal(1.0)
    # try:
    #     stop = dmax + delta
    # except ValueError:
    #     # The magic number!
    #     stop = _from_ordinal(3652059.9999999)

    nmax, nmin = dates.date2num((dmax, dmin))

    num = (nmax - nmin) * 86400 * 1000
    max_millis_ticks = 6
    for interval in [1, 10, 50, 100, 200, 500]:
        if num <= interval * (max_millis_ticks - 1):
            self._interval = interval
            break
    else:
        # We went through the whole loop without breaking, default to 1
        self._interval = 1000.

    estimate = (nmax - nmin) / (self._get_unit() * self._get_interval())

    if estimate > self.MAXTICKS * 2:
        raise RuntimeError(('MillisecondLocator estimated to generate %d '
                            'ticks from %s to %s: exceeds Locator.MAXTICKS'
                            '* 2 (%d) ') %
                           (estimate, dmin, dmax, self.MAXTICKS * 2))

    freq = '%dL' % self._get_interval()
    tz = self.tz.tzname(None)
    st = _from_ordinal(dates.date2num(dmin))  # strip tz
    ed = _from_ordinal(dates.date2num(dmax))
    all_dates = date_range(start=st, end=ed, freq=freq, tz=tz).asobject

    try:
        if len(all_dates) > 0:
            locs = self.raise_if_exceeds(dates.date2num(all_dates))
            return locs
    except Exception:  # pragma: no cover
        pass

    lims = dates.date2num([dmin, dmax])
    return lims
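# Standalone sketch of the interval-selection loop in the locator above:
# given a span in milliseconds, pick the smallest candidate interval that
# keeps the tick count under max_millis_ticks, falling back to one tick per
# second. The helper name is illustrative, not part of the original class.
def pick_millis_interval(num_millis, max_millis_ticks=6):
    for interval in [1, 10, 50, 100, 200, 500]:
        if num_millis <= interval * (max_millis_ticks - 1):
            return interval
    return 1000.0  # fell through the loop: default to one tick per second

print(pick_millis_interval(40))    # 10  (40 ms fits in 5 ticks of 10 ms)
print(pick_millis_interval(2400))  # 500 (2.4 s fits in 5 ticks of 500 ms)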
def demand_analysis(self, date=None, type='day', option=None):
    import calendar
    signal = 'Total_KW'
    # estimate the PPP and PP demand from the KW estimate derived from AWH, BWH and CWH
    period = {'day': {'t1': date.replace(hour=0, minute=0, second=0, microsecond=0),
                      't2': date.replace(hour=18, minute=00, second=00),
                      't3': date.replace(hour=23, minute=00, second=00),
                      't4': date.replace(hour=23, minute=59, second=59)}}

    PPP = list(Measurements.objects.filter(
        Q(user=self.user), Q(sensor=Sensors.objects.get(pk=self.sensor_id)),
        Q(datetimestamp__gte=period[type]['t1'], datetimestamp__lte=period[type]['t2']) |
        Q(datetimestamp__gte=period[type]['t3'], datetimestamp__lte=period[type]['t4'])
    ).extra(select=sql_extra_pairs[signal]['sql'])
     .order_by('datetimestamp')
     .values('datetimestamp', sql_extra_pairs[signal]['extra']))

    # maximum value partially present in the daily peak (consumption)
    PP = list(Measurements.objects.filter(
        Q(user=self.user), Q(sensor=Sensors.objects.get(pk=self.sensor_id)),
        Q(datetimestamp__gte=period[type]['t2'], datetimestamp__lte=period[type]['t3'])
    ).extra(select=sql_extra_pairs[signal]['sql'])
     .order_by('datetimestamp')
     .values('datetimestamp', sql_extra_pairs[signal]['extra']))

    # exit if there are no data available
    if len(PP) == 0 or len(PPP) == 0:
        return {'PP': {'Value': 0, 'datetimestamp': date, 'ave': 0, 'html': ''},
                'PPP': {'Value': 0, 'datetimestamp': date, 'ave': 0, 'html': ''}}

    power_analisis = {'PP': {'Value': 0, 'datetimestamp': date, 'ave': 0, 'html': ''},
                      'PPP': {'Value': 0, 'datetimestamp': date, 'ave': 0, 'html': ''}}

    # search for the PP maximum
    for r in range(len(PP) - 1):
        delta_r = (date2num(PP[r + 1]['datetimestamp']) -
                   date2num(PP[r]['datetimestamp'])) * 24
        power_r = float(PP[r + 1][signal]) / delta_r
        if power_r > power_analisis['PP']['Value']:
            power_analisis['PP']['Value'] = power_r
            power_analisis['PP']['datetimestamp'] = PP[r + 1]['datetimestamp']
    power_r = 0  # reset

    # search for the PPP maximum
    for r in range(len(PPP) - 1):
        delta_r = (date2num(PPP[r + 1]['datetimestamp']) -
                   date2num(PPP[r]['datetimestamp'])) * 24
        power_r = float(PPP[r + 1][signal]) / delta_r
        if power_r > power_analisis['PPP']['Value']:
            power_analisis['PPP']['Value'] = power_r
            power_analisis['PPP']['datetimestamp'] = PPP[r + 1]['datetimestamp']

    # data preparation: formats, gains, etc.
    power_analisis['PPP']['Value'] = power_analisis['PPP']['Value'] * ganancias_dict['Total_KW']['gain']
    power_analisis['PP']['Value'] = power_analisis['PP']['Value'] * ganancias_dict['Total_KW']['gain']
    power_analisis['PP']['html'] = pretty_print(power_analisis['PP']['Value'], label=['KW', 'MW'])
    power_analisis['PPP']['html'] = pretty_print(power_analisis['PPP']['Value'], label=['KW', 'MW'])

    # average power: the energy consumed over a time period divided by that time
    power_analisis['PPP']['ave'] = self.calculo_consumo(
        date=date, type='day',
        option=['energy_by_time_range', 'PPP'])['Total']['Value']
    power_analisis['PP']['ave'] = self.calculo_consumo(
        date=date, type='day',
        option=['energy_by_time_range', 'PP'])['Total']['Value']
    # print(power_analisis)
    return power_analisis
def getcodar_ctl_id(model_option, url, datetime_wanted):
    if model_option == '1':
        dtime = open_url(url + '?time')
        dd = dtime['time']
        # dd holds day offsets from 2009-01-01, so convert before formatting
        print('This option has data from ' +
              num2date(dd[0] + date2num(datetime.datetime(2009, 1, 1, 0, 0))).strftime("%B %d, %Y") +
              ' to ' +
              num2date(dd[-1] + date2num(datetime.datetime(2009, 1, 1, 0, 0))).strftime("%B %d, %Y"))
        id = datetime_wanted - date2num(datetime.datetime(2009, 1, 1, 0, 0))
        id = str(int(id))
    elif model_option == '6':
        dtime = open_url(url + '?time')
        dd = dtime['time']
        ddd = []
        # print('This option has data from ' + ... + ' to ' + ...)
        id = datetime_wanted - date2num(datetime.datetime(2006, 1, 1, 0, 0))
        id = str(int(id))
    else:
        dtime = open_url(url + '?time')
        dd = dtime['time']
        ddd = []
        for i in list(dtime['time']):
            i = round(i, 7)
            ddd.append(i)
        # print("This option has data from " +
        #       str(num2date(dd[0] + date2num(datetime.datetime(2001, 1, 1, 0, 0)))) + " to " +
        #       str(num2date(dd[-1] + date2num(datetime.datetime(2001, 1, 1, 0, 0)))))
        id = ml.find(np.array(ddd) ==
                     round(datetime_wanted - date2num(datetime.datetime(2001, 1, 1, 0, 0)), 7))
        for i in id:
            id = str(i)
        # print('codar id is ' + id)
    return id
def getemolt_sensor(mindtime1, maxdtime1, i_mindepth, i_maxdepth, site2,
                    mindtime, maxdtime):
    # Select data from "emolt_sensor" according to the given conditions
    url2 = "http://gisweb.wh.whoi.edu:8080/dods/whoi/emolt_sensor?emolt_sensor.SITE,emolt_sensor.TIME_LOCAL,emolt_sensor.YRDAY0_LOCAL,emolt_sensor.TEMP,emolt_sensor.DEPTH_I&emolt_sensor.TIME_LOCAL>=" \
        + str(mindtime1) + "&emolt_sensor.TIME_LOCAL<=" + str(maxdtime1) \
        + "&emolt_sensor.DEPTH_I>=" + str(i_mindepth) \
        + "&emolt_sensor.DEPTH_I<=" + str(i_maxdepth) + site2

    try:
        dataset1 = open_url(url2)
    except Exception:
        print('Sorry, ' + url2 + ' not available')
        sys.exit(0)
    emolt_sensor = dataset1['emolt_sensor']
    try:
        sites2 = list(emolt_sensor['SITE'])
    except Exception:
        print('Sorry, no values match your input. Please check it!')
        sys.exit(0)
    # sites2 = list(emolt_sensor['SITE'])
    time = list(emolt_sensor['TIME_LOCAL'])
    yrday0 = list(emolt_sensor['YRDAY0_LOCAL'])
    temp = list(emolt_sensor['TEMP'])
    depth1 = list(emolt_sensor['DEPTH_I'])

    time1, temp1, yrday01, sites1, depth = [], [], [], [], []
    for m in range(len(time)):
        # if mindtime <= dt.datetime.strptime(str(time[m]), '%Y-%m-%d') <= maxdtime:
        if date2num(mindtime) <= yrday0[m] % 1 + date2num(
                dt.datetime.strptime(str(time[m]), '%Y-%m-%d')) <= date2num(maxdtime):
            # if str(time[m]) == '2012-01-01':
            temp1.append(temp[m])
            yrday01.append(yrday0[m] % 1 +
                           date2num(dt.datetime.strptime(str(time[m]), '%Y-%m-%d')))
            sites1.append(sites2[m])
            time1.append(date2num(dt.datetime.strptime(str(time[m]), '%Y-%m-%d')))
            depth.append(depth1[m])
    # print(len(temp1))
    return time1, yrday01, temp1, sites1, depth
def main(sta1, sta2, filterid, components, mov_stack=1, show=True,
         outfile=None):
    db = connect()
    components_to_compute = get_components_to_compute(db)
    maxlag = float(get_config(db, 'maxlag'))
    cc_sampling_rate = float(get_config(db, 'cc_sampling_rate'))
    start, end, datelist = build_movstack_datelist(db)
    # mov_stack = get_config(db, "mov_stack")

    plt.figure(figsize=(16, 16))
    sta1 = sta1.replace('.', '_')
    sta2 = sta2.replace('.', '_')
    if sta2 > sta1:  # alphabetical order filtering!
        pair = "%s:%s" % (sta1, sta2)

        print("New Data for %s-%s-%i-%i" % (pair, components, filterid,
                                            mov_stack))
        format = "matrix"
        nstack, stack_total = get_results(db, sta1, sta2, filterid,
                                          components, datelist, mov_stack,
                                          format=format)
        # vmax = scoreatpercentile(np.abs(stack_total[np.isnan(stack_total) == False]), 98)
        # for i in range(stack_total.shape[0]):
        #     if not np.all(np.isnan(stack_total[i, :])):
        #         print np.max(stack_total[i, :])
        #         stack_total[i, :] /= np.max(stack_total[i, :])
        # stack_total /= np.max(stack_total, axis=0)
        xextent = (date2num(start), date2num(end), -maxlag, maxlag)
        ax = plt.subplot(111)
        plt.imshow(stack_total.T, extent=xextent, aspect="auto",
                   interpolation='none', origin='lower', cmap='seismic',
                   vmin=-1e-2, vmax=1e-2)
        plt.ylabel("Lag Time (s)")
        plt.axhline(0, lw=0.5, c='k')
        plt.grid()

        ax.xaxis.set_major_locator(YearLocator())
        ax.xaxis.set_major_formatter(DateFormatter('%Y-%m'))
        # ax.xaxis.set_minor_locator(MonthLocator(interval=2))
        # ax.xaxis.set_minor_formatter(DateFormatter('%Y-%m-%d'))
        lag = 120
        plt.ylim(-lag, lag)
        plt.title('%s : %s' % (sta1, sta2))
        name = '%i.%s_%s.png' % (filterid, sta1, sta2)
        # plt.savefig('interfero_publi.png', dpi=300)
        # plt.figure()
        # maxx = np.argmax(stack_total, axis=0)
        # plt.plot(maxx)

        if outfile:
            if outfile.startswith("?"):
                pair = pair.replace(':', '-')
                outfile = outfile.replace('?', '%s-%s-f%i-m%i' %
                                          (pair, components, filterid,
                                           mov_stack))
            outfile = "interferogram " + outfile
            print("output to:", outfile)
            plt.savefig(outfile)
        if show:
            plt.show()
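# Hedged sketch of the date-extent trick used in main() above: imshow knows
# nothing about dates, so the x-extent is given as date2num floats and the
# axis is then told to treat those numbers as dates. The random matrix and
# the date range below are placeholders.
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
from matplotlib.dates import date2num

img = np.random.randn(50, 100)
x0, x1 = date2num(datetime(2020, 1, 1)), date2num(datetime(2020, 12, 31))
fig, ax = plt.subplots()
ax.imshow(img, extent=(x0, x1, -120, 120), aspect="auto", origin="lower")
ax.xaxis_date()  # format the numeric x-axis as calendar dates
plt.show()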
def PlotEntropyMapDate(dataset, L):
    f = open('aapl.csv', 'r')
    f.readline()
    data = []
    dataL = []
    counter = 0
    for lines in f:
        ls = lines.split(',')
        data.append((DT.datetime.strptime(ls[0], "%d-%b-%y"), float(ls[-1])))
        if counter % 1000 == 0:
            dataL.append((DT.datetime.strptime(ls[0], "%d-%b-%y"),
                          float(ls[-1])))
        counter = counter + 1

    x = [date2num(date) for (date, value) in data]
    y = [value for (date, value) in data]
    z = [EntropyFilter(Entropy([x, y], ii, L)) for ii in range(len(x))]
    xL = [date2num(date) for (date, value) in dataL]
    yL = [value for (date, value) in dataL]

    fig = plt.figure()
    graph = fig.add_subplot(111)
    graph.set_xticks(xL)
    graph.set_xticklabels([date.strftime("%d-%b-%y")
                           for (date, value) in dataL])
    X = np.array(x)
    Y = np.array(y)
    Z = np.array(z)
    plt.scatter(X, Y, s=Z * 10, c=Z)
    plt.show()
def calc_strong_motion():
    files1 = '/media/PBO/archive/20?[7890]/CX/PATCX/B[HL]Z.D/*.???'
    files2 = '/media/platte/Data/IPOC/raw_PATCX_2011_B-Z/*/*.???'
    acs = []
    vels = []
    for fname in glob(files1) + glob(files2):
        ms = read(fname)
        for tr in ms:
            if tr.stats.endtime - tr.stats.starttime < 1000:
                ms.remove(tr)
                continue
            tr.trim(tr.stats.starttime + 10, tr.stats.endtime - 10)
            tr.stats.filter = ''
            tr.detrend()
            tr.filter('highpass', freq=0.2)
        ms.merge(fill_value=0)
        if len(ms) == 0:
            continue
        tr = ms[0]
        t = tr.stats.starttime + (tr.stats.endtime - tr.stats.starttime) / 2
        maxi = np.max(np.abs(tr.data))
        if 'BH' in fname:
            vels.append((t, maxi))
        elif 'BL' in fname:
            acs.append((t, maxi))
        else:
            print('wrong filename: ', fname)
    dates_vel, vels = zip(*sorted(vels, key=itemgetter(0)))
    dates_ac, acs = zip(*sorted(acs, key=itemgetter(0)))
    np.savez('/home/richter/Results/IPOC/maxima_PATCX_5s.npz',
             dates_ac=date2num(dates_ac), dates_vel=date2num(dates_vel),
             vel=vels, ac=acs)
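# Hedged companion sketch for calc_strong_motion above: the date arrays are
# saved as date2num floats, which plot_date can consume directly. The .npz
# path mirrors the one hard-coded in the function.
import numpy as np
import matplotlib.pyplot as plt

npz = np.load('/home/richter/Results/IPOC/maxima_PATCX_5s.npz')
plt.plot_date(npz['dates_vel'], npz['vel'], label='velocity maxima')
plt.plot_date(npz['dates_ac'], npz['ac'], label='acceleration maxima')
plt.legend()
plt.show()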
def add_plot(self, sources):
    "Add a subplot to this widget displaying all of the signals in sources"
    rows = len(self.plots) + 1
    for i, plot in enumerate(self.plots):
        plot.change_geometry(rows, 1, i + 1)
        plot.label_outer()
    new_plot = LiveSubplot(self.find_source, self, rows, 1, rows)
    td = datetime.timedelta(seconds=self.timescale)
    now = datetime.datetime.utcnow()
    new_plot.set_xbound(mdates.date2num(now - td),
                        mdates.date2num(now))
    if len(sources) == 1:
        new_plot.set_title(sources[0]["name"])
    for descr in sources:
        new_plot.add_signal(descr["identifier"],
                            color=descr["color"],
                            style=descr["style"])
    self.figure.add_subplot(new_plot)
    self.plots.append(new_plot)
    return new_plot
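# Minimal sketch of the sliding time window set up in add_plot above: the
# x-bounds are "now minus the timescale" through "now", expressed as date2num
# floats so matplotlib date plotting lines up with them. The helper name
# window_bounds is illustrative.
import datetime
import matplotlib.dates as mdates

def window_bounds(timescale_seconds):
    now = datetime.datetime.utcnow()
    td = datetime.timedelta(seconds=timescale_seconds)
    return mdates.date2num(now - td), mdates.date2num(now)

xmin, xmax = window_bounds(60)  # a one-minute window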
def _format_plot(self):
    """Formats the plot, i.e. scales axes, sets ticks, etc."""
    # enable grid
    self.ax_left.grid(True)

    # format x-axis
    daterange = self.end_date - self.start_date
    majorlocator, majorformatter, minorlocator = _get_locators(daterange)
    self.ax_left.set_xlim(
        dates.date2num(self.start_date), dates.date2num(self.end_date))
    self.ax_left.xaxis.set_major_locator(majorlocator)
    self.ax_left.xaxis.set_major_formatter(majorformatter)
    self.ax_left.xaxis.set_minor_locator(minorlocator)

    # format y-axis
    y_min, y_max = self._get_ylimits(yaxis='left')
    if y_min is not None:
        self.ax_left.set_ylim(y_min, y_max)
    y_min, y_max = self._get_ylimits(yaxis='right')
    if y_min is not None:
        self.ax_right.set_visible(True)
        self.ax_right.set_ylim(y_min, y_max)
    else:
        self.ax_right.set_visible(False)

    # set label on y-axis
    self.ax_left.set_ylabel(self.ylabels[self.left_datatype])
    self.ax_right.set_ylabel(self.ylabels[self.right_datatype])
def create_header(self, so, psd=False): #matplotlib options for graph font = {'family': 'DejaVu Sans', 'size': 14} matplotlib.rc('font', **font) plt.rcParams['figure.figsize'] = (16, 10) plt.rcParams['figure.facecolor'] = 'white' #make the figure self.figure = plt.figure(figsize=(16, 10)) #Get the time nums for the statistics first_date = uc.convert_ms_to_date(so.stat_dictionary['time'][0], pytz.UTC) last_date = uc.convert_ms_to_date(so.stat_dictionary['time'][-1], pytz.UTC) new_dates = uc.adjust_from_gmt([first_date,last_date], \ so.timezone,so.daylight_savings) first_date = mdates.date2num(new_dates[0]) last_date = mdates.date2num(new_dates[1]) time = so.stat_dictionary['time'] self.time_nums = np.linspace(first_date, last_date, len(time)) #Get the time nums for the wave water level first_date = uc.convert_ms_to_date(so.sea_time[0], pytz.UTC) last_date = uc.convert_ms_to_date(so.sea_time[-1], pytz.UTC) new_dates = uc.adjust_from_gmt([first_date,last_date], \ so.timezone,so.daylight_savings) first_date = mdates.date2num(new_dates[0]) last_date = mdates.date2num(new_dates[1]) time = so.sea_time self.time_nums2 = np.linspace(first_date, last_date, len(time)) #Read images logo = image.imread('usgs.png', None) #Create grids for section formatting if psd == False: self.grid_spec = gridspec.GridSpec(2, 2, width_ratios=[1, 2], height_ratios=[1, 4]) else: self.grid_spec = gridspec.GridSpec(2, 2, width_ratios=[1, 2], height_ratios=[1, 7]) #---------------------------------------Logo Section ax2 = self.figure.add_subplot(self.grid_spec[0, 0]) ax2.set_axis_off() ax2.axes.get_yaxis().set_visible(False) ax2.axes.get_xaxis().set_visible(False) pos1 = ax2.get_position() # get the original position if psd == False: pos2 = [pos1.x0, pos1.y0 + .07, pos1.width, pos1.height] ax2.set_position(pos2) # set a new position else: pos2 = [ pos1.x0 - .062, pos1.y0 + .01, pos1.width + .1, pos1.height + .05 ] ax2.set_position(pos2) # set a new position #display logo ax2.imshow(logo)
def IDL_time_to_num(time_in):
    # convert IDL time to matplotlib date numbers
    time_num = np.zeros(np.size(time_in))
    for ii in np.arange(0, np.size(time_in)):
        time_num[ii] = mdates.date2num(sunpy.time.parse_time(time_in[ii]))
    return time_num
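# Hedged vectorized alternative to the loop in IDL_time_to_num above:
# date2num accepts a sequence, so a single call suffices, assuming
# sunpy.time.parse_time returns datetime-compatible objects (true for the
# older sunpy API this snippet appears to target). The function name is
# illustrative.
import matplotlib.dates as mdates
import sunpy.time

def IDL_time_to_num_vectorized(time_in):
    return mdates.date2num([sunpy.time.parse_time(t) for t in time_in])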
mo_pdyn = i.icmecat['MO_PDYN']
mo_pdyn_std = i.icmecat['MO_PDYN_STD']

# get indices of events by different spacecraft
ivexind = np.where(isc == 'VEX')
istaind = np.where(isc == 'STEREO-A')
istbind = np.where(isc == 'STEREO-B')
iwinind = np.where(isc == 'Wind')
imesind = np.where(isc == 'MESSENGER')
iulyind = np.where(isc == 'ULYSSES')
imavind = np.where(isc == 'MAVEN')

# take MESSENGER only at Mercury, only events after orbit insertion
imercind = np.where(np.logical_and(
    isc == 'MESSENGER',
    icme_start_time_num > mdates.date2num(sunpy.time.parse_time('2011-03-18'))))

# limits of solar minimum, rising phase and solar maximum
minstart = mdates.date2num(sunpy.time.parse_time('2007-01-01'))
minend = mdates.date2num(sunpy.time.parse_time('2009-12-31'))
risestart = mdates.date2num(sunpy.time.parse_time('2010-01-01'))
riseend = mdates.date2num(sunpy.time.parse_time('2011-06-30'))
maxstart = mdates.date2num(sunpy.time.parse_time('2011-07-01'))
maxend = mdates.date2num(sunpy.time.parse_time('2014-12-31'))

# extract events by limits of solar min, rising, max; too few events for MAVEN and Ulysses
    107,
    'Mar 2019', 42, 108,
    'Apr 2019', 42, 107,
]

# Strip out the dates from membership_data
date_strings = membership_data[0::3]

# Convert date strings into numbers
date_nums = []
for d in date_strings:
    date_nums.append(date2num(datetime.strptime(d, '%b %Y')))

# Strip out the number of libmesh-devel subscribers from membership_data
devel_count = membership_data[1::3]

# Strip out the number of libmesh-users subscribers from membership_data
users_count = membership_data[2::3]

# Get a reference to the figure
fig = plt.figure()

# 111 is equivalent to Matlab's subplot(1,1,1) command
ax = fig.add_subplot(111)

# The colors used come from sns.color_palette("muted").as_hex() They
# are the "same basic order of hues as the default matplotlib color
def time_series(self,varname,x,y,times=None,depth=None,**opts): coords=opts.get('coords',self._default_coords('time_series')).split(',') if times is None: times=range(0,self.time.size) # depth or s_level: check is is float or if is negative! isDepth=False if not depth is None: if calc.isiterable(depth): depth=np.asarray(depth) if calc.isarray(depth): isDepth=np.any(depth<0) or depth.kind!='i' else: isDepth=depth<0 or np.asarray(depth).dtype.kind!='i' out=Data() if not depth is None and not isDepth: out.msg=self.check_slice(varname,t=np.max(times),k=depth) else: out.msg=self.check_slice(varname,t=np.max(times)) if out.msg: return out # find nearest point: lon,lat,hr,mr=self.grid.vars(ruvp=self.var_at(varname)) dist=(lon-x)**2+(lat-y)**2 i,j=np.where(dist==dist.min()) i,j=i[0],j[0] if not depth is None and not isDepth: arg={'s_SEARCH':depth} else: arg={} v=self.use(varname,xiSEARCH=j,etaSEARCH=i,SEARCHtime=times,**arg).T # calculate depths: if self.hasz(varname): h=self.grid.h[i,j] zeta=self.use('zeta',xiSEARCH=j,etaSEARCH=i,SEARCHtime=times) h=h+0*zeta #### z=rt.s_levels(h,zeta,self.s_params,rw=varname) z=rt.s_levels(h,zeta,self.s_params,rw=self.var_at(varname)[1]) z=np.squeeze(z) # depth slice: if isDepth and self.hasz(varname): if v.ndim==2: # could use calc.griddata, but better use slicez cos data at # different times may be independent! if 0: from matplotlib.dates import date2num t=np.tile(date2num(self.time[times]),(v.shape[0],1)) v=calc.griddata(t,z,v,t[0],depth+np.zeros(t[0].shape), extrap=opts.get('extrap',False),norm_xy=opts.get('norm_xy',False)) # norm_xy True may be needed! # extrap also may be needed cos the 1st and last value may be masked! else: nt=len(times) land_mask=np.ones((nt,1),dtype=v.dtype) # needed for slicez... not used here! v,vm=rt.slicez(v[...,np.newaxis],land_mask, self.grid.h[i,j]*np.ones((nt,1),dtype=v.dtype), # bottom depth zeta[:,np.newaxis],self.s_params,depth,spline=opts.get('spline',True)) v=np.ma.masked_where(vm,v) v=v[...,0] else: # one time only v=np.interp(depth,z,v,left=np.nan,right=np.nan) v=np.ma.masked_where(np.isnan(v),v) out.v=v out.info['v']['name']=varname out.info['v']['slice']='time series' try: out.info['v']['units']=netcdf.vatt(self.nc,varname,'units') except: pass # coords if 't' in coords and self.hast(varname): if v.ndim==2: out.t=np.tile(self.time[times],(v.shape[0],1)) from matplotlib.dates import date2num out.tnum=np.tile(date2num(self.time[times]),(v.shape[0],1)) else: out.t=self.time[times] out.info['t']['name']='Time' out.info['tnum']=dict(name='Time',units=self.var_as['time']['units']) if 'z' in coords and self.hasz(varname): if not depth is None: if not isDepth: out.z=z[depth,...] else: out.z=depth+0*v else: out.z=z out.info['z']=dict(name='Depth',units='m') if 'x' in coords: out.x=lon[i,j] if self.grid.is_spherical: out.info['x']=dict(name='Longitude',units=r'$\^o$E') else: out.x=x/1000. out.info['x']=dict(name='X-position',units='km') if 'y' in coords: out.y=lat[i,j] if self.grid.is_spherical: out.info['y']=dict(name='Latitude',units=r'$\^o$N') else: out.y=y/1000. out.info['y']=dict(name='Y-position',units='km') out.coordsReq=','.join(sorted(coords)) return out
def do_plot(self, wallet, history):
    balance_Val = []
    fee_val = []
    value_val = []
    datenums = []
    unknown_trans = 0
    pending_trans = 0
    counter_trans = 0
    balance = 0
    for item in history:
        tx_hash, confirmations, value, timestamp, balance = item
        if confirmations:
            if timestamp is not None:
                try:
                    datenums.append(md.date2num(
                        datetime.datetime.fromtimestamp(timestamp)))
                    balance_Val.append(1000. * balance / COIN)
                except (RuntimeError, TypeError, NameError) as reason:
                    unknown_trans += 1
                    pass
            else:
                unknown_trans += 1
        else:
            pending_trans += 1

        value_val.append(1000. * value / COIN)

        if tx_hash:
            label, is_default_label = wallet.get_label(tx_hash)
            label = label.encode('utf-8')
        else:
            label = ""

    f, axarr = plt.subplots(2, sharex=True)

    plt.subplots_adjust(bottom=0.2)
    plt.xticks(rotation=25)
    ax = plt.gca()
    x = 19
    test11 = "Unknown transactions = " + str(unknown_trans) + \
             " Pending transactions = " + str(pending_trans) + " ."
    box1 = TextArea(" Test : Number of pending transactions",
                    textprops=dict(color="k"))
    box1.set_text(test11)

    box = HPacker(children=[box1], align="center", pad=0.1, sep=15)

    anchored_box = AnchoredOffsetbox(
        loc=3,
        child=box,
        pad=0.5,
        frameon=True,
        bbox_to_anchor=(0.5, 1.02),
        bbox_transform=ax.transAxes,
        borderpad=0.5,
    )

    ax.add_artist(anchored_box)

    plt.ylabel('mBOLI')
    plt.xlabel('Dates')
    xfmt = md.DateFormatter('%Y-%m-%d')
    ax.xaxis.set_major_formatter(xfmt)
    axarr[0].plot(datenums, balance_Val, marker='o', linestyle='-',
                  color='blue', label='Balance')
    axarr[0].legend(loc='upper left')
    axarr[0].set_title('History Transactions')

    xfmt = md.DateFormatter('%Y-%m-%d')
    ax.xaxis.set_major_formatter(xfmt)
    axarr[1].plot(datenums, value_val, marker='o', linestyle='-',
                  color='green', label='Value')

    axarr[1].legend(loc='upper left')
    # plt.annotate('unknown transaction = %d \n pending transactions = %d'
    #              % (unknown_trans, pending_trans),
    #              xy=(0.7, 0.05), xycoords='axes fraction', size=12)
    plt.show()
    }
    out_st = Stream([Trace(data=np.array(data), header=stats)])
    out_st.write(year_rsam_file, format='MSEED', reclen=256)

# Make RSAM plot folder if required
if not os.path.exists('./output/'):
    os.makedirs('./output')

# Plot RSAM data
for stream in streams:
    # Set plot dates
    yd1 = date2num(date - datetime.timedelta(days=num_plot_days)) + 1
    yd2 = date2num(date)
    dates = num2date(np.arange(yd1, yd2, 1))

    # Gather data for plotting
    frequency_ranges = args.filter_ranges.split('],[')
    frequency_banded_traces = []
    for frequency_range in frequency_ranges:
        frequency_bounds = frequency_range.replace('[', '').replace(']', '').split(',')
        print('Plotting 10-minute mean RSAM values for ' + stream +
              ' between dates ' +
              str(date - datetime.timedelta(days=num_plot_days))[:10] +
def get_laohu_analysis(n, url, li_code):
    fig, axes = plt.subplots(nrows=10, ncols=10, figsize=(30, 30))
    li = []
    nu_nu = 0
    jo = pd.DataFrame()
    quotes = pd.DataFrame()
    for code_nm in li_code:
        print('--------------------------------------' + str(nu_nu + (n * 100)) +
              '----------------------------------------------')
        con = requests.get(url.format(str(code_nm)), headers=header).json()
        time.sleep(0.1)
        li_data = con.get('items')
        if li_data is not None:
            jo = pd.DataFrame(li_data)
            quotes = jo.copy()
            quotes = quotes.sort_values(by="time", ascending=False)[:15]
            quotes = quotes.sort_values(by="time", ascending=True)
            quotes['time'] = quotes['time'].apply(todate)
            quotes['time'] = pd.to_datetime(quotes['time'], format="%Y-%m-%d")

            x = jo['time'].values
            y = jo['close'].values
            vol = jo['volume'].values
            vol_li = jo['volume'].tolist()
            vol_min = min(vol_li)
            vol_max = max(vol_li)
            vol_up = (vol_max - vol_min) * 0.3 + vol_max

            z1 = np.polyfit(x, y, 4)  # fit a degree-4 polynomial
            p1 = np.poly1d(z1)
            # print(p1)  # print the fitted polynomial
            yvals = p1(x)  # equivalently: yvals = np.polyval(z1, x)
            der = p1.deriv(m=2)
            dder = der(x)
            dder_list = dder.tolist()
            maxindex = dder_list.index(max(dder_list))
            # print(maxindex)
            time_max = jo['time'][maxindex]
            close_max = jo['close'][maxindex]
            p1_max = p1(time_max)
            # print(jo)

            ax = axes[nu_nu // 10, nu_nu % 10]
            count = quotes.shape[0]
            year = int(count / 48)
            ax.set_title(str(code_nm) + '(' + str(year) + ')', fontsize=18,
                         fontweight='bold')
            # plot1 = ax.plot(x, y, marker=r'$\clubsuit$', color='goldenrod', markersize=15, label='original values')
            # plot1 = ax.plot(x, y, 'o', color='goldenrod', markersize=10, label='original values')
            # data_list = []
            # for row in quotes.itertuples():
            #     date_time = datetime.datetime.strptime(getattr(row, 'time'), '%Y-%m-%d')
            #     t = date2num(date_time)
            #     open_tmp = getattr(row, 'open')
            #     high_tmp = getattr(row, 'high')
            #     low_tmp = getattr(row, 'low')
            #     close_tmp = getattr(row, 'close')
            #     datas = (t, open_tmp, high_tmp, low_tmp, close_tmp)
            #     data_list.append(datas)
            candlestick_ohlc(ax,
                             zip(mdates.date2num(quotes['time'].dt.to_pydatetime()),
                                 quotes['open'], quotes['high'],
                                 quotes['low'], quotes['close']),
                             width=0.6, colordown='#53c156', colorup='#ff1717')
            # if len(quotes["close"].tolist()) > 200:
            #     ma150 = moving_average(quotes["close"], 150, type='simple')
            #     ma200 = moving_average(quotes["close"], 200, type='simple')
            #     linema150, = ax.plot(quotes['time'], ma150, color='blue', lw=2, label='MA (150)')
            #     linema200, = ax.plot(quotes['time'], ma200, color='red', lw=2, label='MA (200)')
            # mpf.candlestick_ohlc(ax, data_list, width=1.5, colorup='r', colordown='green')
            ax.xaxis.set_major_locator(ticker.NullLocator())
            ax.set_ylabel(' ', fontsize=0.01)
            ax.set_xlabel(' ', fontsize=0.01)
            ax.spines['top'].set_linewidth(2)
            ax.spines['bottom'].set_linewidth(2)
            ax.spines['left'].set_linewidth(2)
            ax.spines['right'].set_linewidth(2)
            for label in ax.get_xticklabels() + ax.get_yticklabels():
                label.set_fontsize(14)

            ax2t = ax.twinx()
            volume = (quotes.close * quotes.volume) / 1e6  # dollar volume in millions
            vmax = volume.max()
            fillcolor = 'darkgoldenrod'
            poly = ax2t.fill_between(quotes.time.values, volume, 0,
                                     label='Volume', facecolor=fillcolor,
                                     edgecolor=fillcolor)
            ax2t.set_ylim(0, 20 * vmax)
            ax2t.set_yticks([])
            ax2t.set_xticks([])
            del jo, quotes
            gc.collect()
            # mondays = WeekdayLocator(MONDAY)
            # ax.xaxis.set_major_locator(mondays)
            # daysFmt = DateFormatter("%m%d")
            # ax.xaxis.set_major_formatter(daysFmt)
            # ax2t.autoscale_view()
            # ax2 = ax.twinx()
            # plot3 = ax2.plot(x, vol, zorder=0, c="g", linewidth=2, alpha=0.7)
            # ax2.set_zorder(0)
            # ax2.set_ylabel('volume')
            # ax2.set_ylim(vol_min, vol_up)
            # ax2.yaxis.set_major_locator(plt.NullLocator())
            nu_nu = nu_nu + 1

    fig.tight_layout(rect=[0.02, 0.02, 0.98, 0.98], pad=0.2, h_pad=0.2,
                     w_pad=0.2)
    fig.subplots_adjust(wspace=0.2, hspace=0.2)
    plt.savefig('D:/Git/us_stock/technical_analysis/Main/up/4up1down1up_no_limit/up_data/'
                + date + "_fig_up_price_" + str(n) + ".png")
import io_helper as ioh

# define the location of the price data file
filename_bitmex_data = "data/bitmex_data.msgpack.zlib"

# load the price data
with open(filename_bitmex_data, "rb") as f:
    temp = msgpack.unpackb(zlib.decompress(f.read()))
price_symbol = temp[0]['symbol']
t_price_data = np.array([el["t_epoch"] for el in temp], dtype=np.float64)
#price_data = np.array([el["open"] for el in temp], dtype=np.float64)
price_data = np.array([el["close"] for el in temp], dtype=np.float64)

# initialise some labels for the plot
datenum_price_data = [
    md.date2num(datetime.datetime.fromtimestamp(el)) for el in t_price_data
]
datenum_price_data = datenum_price_data[::24]
price_data = price_data[::24]

# define the location of the input file
filename_article_results = "results/article_results.json"

# load the article data
with open(filename_article_results, "rb") as f:
    article_results = json.load(f)
article_symbol = article_results['symbol']

# define the location of the input file
filename_funding_results = "results/funding_results.json"
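# Hedged side note on the epoch conversion above: fromtimestamp() interprets
# epoch seconds in the local timezone. If timezone-stable date numbers are
# wanted, an equivalent sketch uses utcfromtimestamp; date2num also accepts
# the whole list in one call (the sample timestamps below are placeholders).
import datetime
import matplotlib.dates as md

t_epoch_demo = [1577836800.0, 1577923200.0]
datenum_demo = md.date2num(
    [datetime.datetime.utcfromtimestamp(t) for t in t_epoch_demo])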
def display(self, symbol, orders, klines): """ gs = gridspec.GridSpec(8, 1) gs.update(left=0.04, bottom=0.04, right=1, top=1, wspace=0, hspace=0) axes = [ plt.subplot(gs[0:-2, :]), #plt.subplot(gs[-4:-2, :]), plt.subplot(gs[-2:-1, :]), plt.subplot(gs[-1, :]) ] """ fig, axes = plt.subplots(5, 1, sharex=True) fig.subplots_adjust(left=0.04, bottom=0.04, right=1, top=1, wspace=0, hspace=0) trade_times = [order["trade_time"] for order in orders] quotes = [] for k in klines: d = datetime.fromtimestamp(k[0] / 1000) quote = (dts.date2num(d), float(k[1]), float(k[4]), float(k[2]), float(k[3])) quotes.append(quote) mpf.candlestick_ochl(axes[0], quotes, width=0.2, colorup='g', colordown='r') axes[0].set_ylabel('price') axes[0].grid(True) axes[0].autoscale_view() axes[0].xaxis_date() axes[0].plot(trade_times, [(order["deal_value"] / order["deal_amount"]) for order in orders], "o--") klines_df = pd.DataFrame(klines, columns=self.kline_column_names) open_times = [ datetime.fromtimestamp((open_time / 1000)) for open_time in klines_df["open_time"] ] klines_df["close"] = pd.to_numeric(klines_df["close"]) base_close = klines_df["close"].values[0] klines_df["ATR"] = talib.ATR(klines_df["high"], klines_df["low"], klines_df["close"], timeperiod=14) klines_df["NATR"] = talib.NATR(klines_df["high"], klines_df["low"], klines_df["close"], timeperiod=14) klines_df["TRANGE"] = talib.TRANGE(klines_df["high"], klines_df["low"], klines_df["close"]) # axes[0].plot(open_times, klines_df["ATR"]*10, "y:", label="ATR") axes[1].set_ylabel('volatility') axes[1].grid(True) axes[1].plot(open_times, klines_df["ATR"], "y:", label="ATR") axes[1].plot(open_times, klines_df["NATR"], "k--", label="NATR") axes[1].plot(open_times, klines_df["TRANGE"], "c--", label="TRANGE") ks, ds, js = ic.pd_kdj(klines_df) axes[2].set_ylabel('kdj') axes[2].grid(True) axes[2].plot(open_times, ks, "b", label="k") axes[2].plot(open_times, ds, "y", label="d") axes[2].plot(open_times, js, "m", label="j") axes[-2].set_ylabel('total profit rate') axes[-2].grid(True) axes[-2].plot( trade_times, [round(100 * order["total_profit_rate"], 2) for order in orders], "go--") axes[-2].plot(open_times, [ round(100 * ((close / base_close) - 1), 2) for close in klines_df["close"] ], "r--") axes[-1].set_ylabel('rate') axes[-1].grid(True) #axes[-1].set_label(["position rate", "profit rate"]) axes[-1].plot(trade_times, [round(100 * order["pst_rate"], 2) for order in orders], "k-", drawstyle="steps-post", label="position") axes[-1].plot(trade_times, [ round(100 * order["floating_profit_rate"], 2) for order in orders ], "g--", drawstyle="steps", label="profit") """ trade_times = [] pst_rates = [] for i, order in enumerate(orders): #补充 if i > 0 and orders[i-1]["pst_rate"] > 0: tmp_trade_date = orders[i-1]["trade_time"].date() + timedelta(days=1) while tmp_trade_date < order["trade_time"].date(): trade_times.append(tmp_trade_date) pst_rates.append(orders[i-1]["pst_rate"]) print("add %s, %s" % (tmp_trade_date, orders[i-1]["pst_rate"])) tmp_trade_date += timedelta(days=1) # 添加 trade_times.append(order["trade_time"]) pst_rates.append(order["pst_rate"]) print("%s, %s" % (order["trade_time"], order["pst_rate"])) plt.bar(trade_times, pst_rates, width= 0.3) # """ plt.show()
from matplotlib.dates import date2num
# from base_def import BaseEnum

# Set constants for unpacking .raw files
BLOCK_SIZE = 1024 * 4  # Block size read in from binary file to search for token
LENGTH_SIZE = 4
DATAGRAM_HEADER_SIZE = 12
CONFIG_HEADER_SIZE = 516
CONFIG_TRANSDUCER_SIZE = 320

# set global regex expressions to find all sample, annotation and NMEA sentences
SAMPLE_REGEX = rb'RAW\d{1}'
SAMPLE_MATCHER = re.compile(SAMPLE_REGEX, re.DOTALL)

# Reference time "seconds since 1900-01-01 00:00:00"
REF_TIME = date2num(dt(1900, 1, 1, 0, 0, 0))

# ---------- NEED A GENERIC FILENAME PARSER -------------
# Common EK60 *.raw filename format
# EK60_RAW_NAME_REGEX = r'(?P<Refdes>\S*)_*OOI-D(?P<Date>\d{8})-T(?P<Time>\d{6})\.raw'
# EK60_RAW_NAME_MATCHER = re.compile(EK60_RAW_NAME_REGEX)
#
# Regex to extract the timestamp from the *.raw filename (path/to/OOI-DYYYYmmdd-THHMMSS.raw)
# FILE_NAME_REGEX = r'(?P<Refdes>\S*)_*OOI-D(?P<Date>\d{8})-T(?P<Time>\d{6})\.raw'
# FILE_NAME_MATCHER = re.compile(FILE_NAME_REGEX)

WINDOWS_EPOCH = dt(1601, 1, 1)
NTP_EPOCH = dt(1900, 1, 1)
NTP_WINDOWS_DELTA = (NTP_EPOCH - WINDOWS_EPOCH).total_seconds()

# Numpy data type object for unpacking the Sample datagram including the header from binary *.raw
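# Hedged sketch of how REF_TIME above is meant to be used: NTP-style
# timestamps count seconds since 1900-01-01, so dividing by the number of
# seconds per day and adding the reference date number yields a matplotlib
# datenum. The function name is illustrative, not from the original module.
def ntp_seconds_to_datenum(ntp_seconds):
    return REF_TIME + ntp_seconds / (24.0 * 60.0 * 60.0)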
def animate(i): global refreshRate global DatCounter def rsiIndicator(priceData, location="top"): try: if location == "top": values = { 'key': 1, "prices": priceData, "periods": topIndicator[1] } if location == "bottom": values = { 'key': 1, "prices": priceData, "periods": bottomIndicator[1] } url = "http://seaofbtc.com/api/indicator/rsi" data = urllib.parse.urlencode(values) data = data.encode("utf-8") req = urllib.request.Request(url, data) resp = urllib.request.urlopen(req) respData = resp.read() newData = str(respData).replace("b", "").replace("[", "").replace( "]", "").replace("'", "") priceList = newData.split(', ') rsiData = [float(i) for i in priceList] if location == "top": a0.plot_date(OHLC['MPLDates'], rsiData, lightColor, label="RSI") #datLabel = "RSI("+str(topIndicator[1])+")" #a0.set_ylabel(datLabel) if location == "bottom": a3.plot_date(OHLC['MPLDates'], rsiData, lightColor, label="RSI") #datLabel = "RSI("+str(topIndicator[1])+")" #a3.set_ylabel(datLabel) except Exception as e: print("failed in rsi", str(e)) if chartLoad: if paneCount == 1: if DataPace == "tick": try: if exchange == "BTC-e": a = plt.subplot2grid((6, 4), (0, 0), rowspan=5, colspan=4) a2 = plt.subplot2grid((6, 4), (5, 0), rowspan=1, colspan=4, sharex=a) dataLink = 'https://btc-e.com/api/3/trades/btc_usd?limit=2000' data = urllib.request.urlopen(dataLink) data = data.readall().decode("utf-8") data = json.loads(data) data = data["btc_usd"] data = pd.DataFrame(data) data["datestamp"] = np.array( data['timestamp']).astype("datetime64[s]") allDates = data["datestamp"].tolist() buys = data[(data['type'] == "bid")] #buys["datestamp"] = np.array(buys["timestamp"]).astype("datetime64[s]") buyDates = (buys["datestamp"]).tolist() sells = data[(data['type'] == "ask")] #sells["datestamp"] = np.array(sells["timestamp"]).astype("datetime64[s]") sellDates = (sells["datestamp"]).tolist() volume = data["amount"] a.clear() a.plot_date(buyDates, buys["price"], lightColor, label="buys") a.plot_date(sellDates, sells["price"], darkColor, label="sells") a2.fill_between(allDates, 0, volume, facecolor=darkColor) a.xaxis.set_major_locator(mticker.MaxNLocator(5)) a.xaxis.set_major_formatter( mdates.DateFormatter("%Y-%m-%d %H:%M:%S")) plt.setp(a.get_xticklabels(), visible=False) a.legend(bbox_to_anchor=(0, 1.02, 1, .102), loc=3, ncol=2, borderaxespad=0) title = "BTC-e BTCUSD Prices\nLast Price: " + str( data["price"][1999]) a.set_title(title) priceData = data['price'].apply(float).tolist() if exchange == "Bitstamp": a = plt.subplot2grid((6, 4), (0, 0), rowspan=5, colspan=4) a2 = plt.subplot2grid((6, 4), (5, 0), rowspan=1, colspan=4, sharex=a) dataLink = 'https://www.bitstamp.net/api/transactions/' data = urllib.request.urlopen(dataLink) data = data.readall().decode("utf-8") data = json.loads(data) data = pd.DataFrame(data) data["datestamp"] = np.array( data['date'].apply(int)).astype("datetime64[s]") dateStamps = data["datestamp"].tolist() #allDates = data["datestamp"].tolist() ## buys = data[(data['type']=="bid")] ## #buys["datestamp"] = np.array(buys["timestamp"]).astype("datetime64[s]") ## buyDates = (buys["datestamp"]).tolist() ## ## ## sells = data[(data['type']=="ask")] ## #sells["datestamp"] = np.array(sells["timestamp"]).astype("datetime64[s]") ## sellDates = (sells["datestamp"]).tolist() volume = data["amount"].apply(float).tolist() a.clear() a.plot_date(dateStamps, data["price"], lightColor, label="buys") a2.fill_between(dateStamps, 0, volume, facecolor=darkColor) a.xaxis.set_major_locator(mticker.MaxNLocator(5)) 
a.xaxis.set_major_formatter( mdates.DateFormatter("%Y-%m-%d %H:%M:%S")) plt.setp(a.get_xticklabels(), visible=False) a.legend(bbox_to_anchor=(0, 1.02, 1, .102), loc=3, ncol=2, borderaxespad=0) title = "Bitstamp BTCUSD Prices\nLast Price: " + str( data["price"][0]) a.set_title(title) priceData = data['price'].apply(float).tolist() if exchange == "Bitfinex": a = plt.subplot2grid((6, 4), (0, 0), rowspan=5, colspan=4) a2 = plt.subplot2grid((6, 4), (5, 0), rowspan=1, colspan=4, sharex=a) dataLink = 'https://api.bitfinex.com/v1/trades/btcusd?limit=2000' data = urllib.request.urlopen(dataLink) data = data.readall().decode("utf-8") data = json.loads(data) data = pd.DataFrame(data) data["datestamp"] = np.array( data['timestamp']).astype("datetime64[s]") allDates = data["datestamp"].tolist() buys = data[(data['type'] == "buy")] #buys["datestamp"] = np.array(buys["timestamp"]).astype("datetime64[s]") buyDates = (buys["datestamp"]).tolist() sells = data[(data['type'] == "sell")] #sells["datestamp"] = np.array(sells["timestamp"]).astype("datetime64[s]") sellDates = (sells["datestamp"]).tolist() volume = data["amount"].apply(float).tolist() a.clear() a.plot_date(buyDates, buys["price"], lightColor, label="buys") a.plot_date(sellDates, sells["price"], darkColor, label="sells") a2.fill_between(allDates, 0, volume, facecolor=darkColor) a.xaxis.set_major_locator(mticker.MaxNLocator(5)) a.xaxis.set_major_formatter( mdates.DateFormatter("%Y-%m-%d %H:%M:%S")) plt.setp(a.get_xticklabels(), visible=False) a.legend(bbox_to_anchor=(0, 1.02, 1, .102), loc=3, ncol=2, borderaxespad=0) title = "Bitfinex BTCUSD Prices\nLast Price: " + str( data["price"][0]) a.set_title(title) priceData = data['price'].apply(float).tolist() if exchange == "Huobi": a = plt.subplot2grid((6, 4), (0, 0), rowspan=6, colspan=4) data = urllib.request.urlopen( 'http://seaofbtc.com/api/basic/price?key=1&tf=1d&exchange=' + programName).read() data = data.decode() data = json.loads(data) dateStamp = np.array(data[0]).astype("datetime64[s]") dateStamp = dateStamp.tolist() df = pd.DataFrame({'Datetime': dateStamp}) df['Price'] = data[1] df['Volume'] = data[2] df['Symbol'] = "BTCUSD" df['MPLDate'] = df['Datetime'].apply( lambda date: mdates.date2num(date.to_pydatetime())) df = df.set_index("Datetime") lastPrice = df["Price"][-1] a.plot_date(df['MPLDate'][-4500:], df['Price'][-4500:], lightColor, label="price") a.xaxis.set_major_locator(mticker.MaxNLocator(5)) a.xaxis.set_major_formatter( mdates.DateFormatter("%Y-%m-%d %H:%M:%S")) title = "Huobi BTCUSD Prices\nLast Price: " + str( lastPrice) a.set_title(title) priceData = df['price'].apply(float).tolist() except Exception as e: print("Failed because of:", e) else: if DatCounter > 12: try: if exchange == "Huobi": if topIndicator != "none": a = plt.subplot2grid((6, 4), (1, 0), rowspan=5, colspan=4) a2 = plt.subplot2grid((6, 4), (0, 0), sharex=a, rowspan=1, colspan=4) else: a = plt.subplot2grid((6, 4), (0, 0), rowspan=6, colspan=4) else: if topIndicator != "none" and bottomIndicator != "none": # Main Graph a = plt.subplot2grid((6, 4), (1, 0), rowspan=3, colspan=4) # Volume a2 = plt.subplot2grid((6, 4), (4, 0), sharex=a, rowspan=1, colspan=4) # Bottom Indicator a3 = plt.subplot2grid((6, 4), (5, 0), sharex=a, rowspan=1, colspan=4) # Top Indicator a0 = plt.subplot2grid((6, 4), (0, 0), sharex=a, rowspan=1, colspan=4) elif topIndicator != "none": # Main Graph a = plt.subplot2grid((6, 4), (1, 0), rowspan=4, colspan=4) # Volume a2 = plt.subplot2grid((6, 4), (5, 0), sharex=a, rowspan=1, colspan=4) # Top 
Indicator a0 = plt.subplot2grid((6, 4), (0, 0), sharex=a, rowspan=1, colspan=4) elif bottomIndicator != "none": # Main Graph a = plt.subplot2grid((6, 4), (0, 0), rowspan=4, colspan=4) # Volume a2 = plt.subplot2grid((6, 4), (4, 0), sharex=a, rowspan=1, colspan=4) # Bottom Indicator a3 = plt.subplot2grid((6, 4), (5, 0), sharex=a, rowspan=1, colspan=4) else: # Main Graph a = plt.subplot2grid((6, 4), (0, 0), rowspan=5, colspan=4) # Volume a2 = plt.subplot2grid((6, 4), (5, 0), sharex=a, rowspan=1, colspan=4) data = urllib.request.urlopen( "http://seaofbtc.com/api/basic/price?key=1&tf=" + DataPace + "&exchange=" + programName).read() data = data.decode() data = json.loads(data) dateStamp = np.array(data[0]).astype("datetime64[s]") dateStamp = dateStamp.tolist() df = pd.DataFrame({'Datetime': dateStamp}) df['Price'] = data[1] df['Volume'] = data[2] df['Symbol'] = 'BTCUSD' df['MPLDate'] = df['Datetime'].apply( lambda date: mdates.date2num(date.to_pydatetime())) df = df.set_index("Datetime") OHLC = df['Price'].resample(resampleSize, how="ohlc") OHLC = OHLC.dropna() volumeData = df['Volume'].resample( resampleSize, how={'volume': 'sum'}) OHLC["dateCopy"] = OHLC.index OHLC["MPLDates"] = OHLC["dateCopy"].apply( lambda date: mdates.date2num(date.to_pydatetime())) del OHLC["dateCopy"] volumeData["dateCopy"] = volumeData.index volumeData["MPLDates"] = volumeData["dateCopy"].apply( lambda date: mdates.date2num(date.to_pydatetime())) del volumeData["dateCopy"] priceData = OHLC['close'].apply(float).tolist() a.clear() if middleIndicator != "none": for eachMA in middleIndicator: #ewma = pd.stats.moments.ewma if eachMA[0] == "sma": sma = pd.rolling_mean( OHLC["close"], eachMA[1]) label = str(eachMA[1]) + " SMA" a.plot(OHLC["MPLDates"], sma, label=label) if eachMA[0] == "ema": ewma = pd.stats.moments.ewma label = str(eachMA[1]) + " EMA" a.plot(OHLC["MPLDates"], ewma(OHLC["close"], eachMA[1]), label=label) a.legend(loc=0) if topIndicator[0] == "rsi": rsiIndicator(priceData, "top") elif topIndicator == "macd": try: computeMACD(priceData, location="top") except Exception as e: print(str(e)) if bottomIndicator[0] == "rsi": rsiIndicator(priceData, "bottom") elif bottomIndicator == "macd": try: computeMACD(priceData, location="bottom") except Exception as e: print(str(e)) csticks = candlestick_ohlc( a, OHLC[["MPLDates", "open", "high", "low", "close"]].values, width=candleWidth, colorup=lightColor, colordown=darkColor) a.set_ylabel("Price") if exchange != "Huobi": a2.fill_between(volumeData["MPLDates"], 0, volumeData['volume'], facecolor=darkColor) a2.set_ylabel("Volume") a.xaxis.set_major_locator(mticker.MaxNLocator(3)) a.xaxis.set_major_formatter( mdates.DateFormatter('%Y-%m-%d %H:%M')) if exchange != "Huobi": plt.setp(a.get_xticklabels(), visible=False) if topIndicator != "none": plt.setp(a0.get_xticklabels(), visible=False) if bottomIndicator != "none": plt.setp(a2.get_xticklabels(), visible=False) x = (len(OHLC['close'])) - 1 if DataPace == "1d": title = exchange + " 1 Day Data with " + resampleSize + " Bars\nLast Price: " + str( OHLC['close'][x]) if DataPace == "3d": title = exchange + " 3 Day Data with " + resampleSize + " Bars\nLast Price: " + str( OHLC['close'][x]) if DataPace == "7d": title = exchange + " 7 Day Data with " + resampleSize + " Bars\nLast Price: " + str( OHLC['close'][x]) if topIndicator != "none": a0.set_title(title) else: a.set_title(title) print("New Graph") DatCounter = 0 except Exception as e: print('failed in the non-tick animate:', str(e)) DatCounter = 9000 else: DatCounter += 1
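# The animate() callback above comes from Python 3.4-era tutorials: current
# urllib HTTPResponse objects expose read(), not readall(); the Huobi tick
# branch reads df['price'] although the column is created as df['Price']; and
# pd.rolling_mean, pd.stats.moments.ewma, and resample(..., how=...) were all
# removed from pandas long ago. A minimal sketch of the modern equivalents,
# assuming a DataFrame indexed by Datetime with 'Price' and 'Volume' columns:
import numpy as np
import pandas as pd

idx = pd.date_range('2021-01-01', periods=120, freq='min')
df = pd.DataFrame({'Price': np.random.rand(120).cumsum() + 100,
                   'Volume': np.random.rand(120)}, index=idx)

OHLC = df['Price'].resample('15min').ohlc().dropna()  # was resample(..., how="ohlc")
volumeData = df['Volume'].resample('15min').sum()     # was how={'volume': 'sum'}
sma = OHLC['close'].rolling(10).mean()                # was pd.rolling_mean(...)
ema = OHLC['close'].ewm(span=10).mean()               # was pd.stats.moments.ewma(...)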
def HMON_HYCOM_cross_transect(storm_id, cycle, tempt_lim_MAB, tempt_lim_GoM): #%% import numpy as np import matplotlib.pyplot as plt from datetime import datetime, timedelta import os import os.path import glob import cmocean from matplotlib.dates import date2num, num2date import xarray as xr import sys sys.path.append('/home/Maria.Aristizabal/NCEP_scripts/') from utils4HYCOM import readgrids #from utils4HYCOM import readdepth, readVar from utils4HYCOM2 import readBinz # Increase fontsize of labels globally plt.rc('xtick', labelsize=14) plt.rc('ytick', labelsize=14) plt.rc('legend', fontsize=14) #%% ti = datetime.today() ffig = '/home/Maria.Aristizabal/Figures/' + str( ti.year) + '/' + ti.strftime('%b-%d') folder_fig = ffig + '/' + storm_id + '_' + cycle + '/' os.system('mkdir ' + ffig) os.system('mkdir ' + folder_fig) #%% Bathymetry file bath_file = '/scratch2/NOS/nosofs/Maria.Aristizabal/bathymetry_files/GEBCO_2014_2D_-100.0_0.0_-10.0_70.0.nc' #%% Reading bathymetry data ncbath = xr.open_dataset(bath_file) bath_lat = ncbath.variables['lat'][:] bath_lon = ncbath.variables['lon'][:] bath_elev = ncbath.variables['elevation'][:] #%% folder and file names ti = datetime.today() - timedelta(1) folder_hmon_hycom = '/scratch2/NOS/nosofs/Maria.Aristizabal/HMON_HYCOM_' + storm_id + '_' + str( ti.year) + '/' + 'HMON_HYCOM_' + storm_id + '_' + cycle + '/' #%% Reading RTOFS grid grid_file = sorted( glob.glob(os.path.join(folder_hmon_hycom, '*regional.grid.*')))[0][:-2] #%% Reading RTOFS grid print('Retrieving coordinates from RTOFS') # Reading lat and lon #lines_grid = [line.rstrip() for line in open(grid_file+'.b')] lon_hycom = np.array(readgrids(grid_file, 'plon:', [0])) lat_hycom = np.array(readgrids(grid_file, 'plat:', [0])) #depth_HMON_HYCOM = np.asarray(readdepth(HMON_HYCOM_depth,'depth')) # Reading depths afiles = sorted( glob.glob(os.path.join(folder_hmon_hycom, '*hat10_3z' + '*.a'))) lines = [line.rstrip() for line in open(afiles[0][:-2] + '.b')] z = [] for line in lines[6:]: if line.split()[2] == 'temp': #print(line.split()[1]) z.append(float(line.split()[1])) depth_HYCOM = np.asarray(z) time_HYCOM = [] for x, file in enumerate(afiles): print(x) #lines=[line.rstrip() for line in open(file[:-2]+'.b')] #Reading time stamp year = int(file.split('/')[-1].split('.')[1][0:4]) month = int(file.split('/')[-1].split('.')[1][4:6]) day = int(file.split('/')[-1].split('.')[1][6:8]) hour = int(file.split('/')[-1].split('.')[1][8:10]) dt = int(file.split('/')[-1].split('.')[-2][1:]) timestamp_HYCOM = date2num(datetime(year, month, day, hour)) + dt / 24 time_HYCOM.append(num2date(timestamp_HYCOM)) # Reading 3D variable from binary file oktime = 0 # first file temp_HMON_HYCOM = readBinz(afiles[oktime][:-2], '3z', 'temp') #salt_HMON_HYCOM = readBinz(afiles[oktime][:-2],'3z','salinity') #%% x1 = -74.1 y1 = 39.4 x2 = -73.0 y2 = 38.6 # Slope m = (y1 - y2) / (x1 - x2) # Intercept b = y1 - m * x1 X = np.arange(x1, -72, 0.05) Y = b + m * X dist = np.sqrt((X - x1)**2 + (Y - y1)**2) * 111 # approx along transect distance in km oklon = np.round( np.interp(X, lon_hycom[0, :] - 360, np.arange(len(lon_hycom[0, :])))).astype(int) oklat = np.round( np.interp(Y, lat_hycom[:, 0], np.arange(len(lat_hycom[:, 0])))).astype(int) trans_temp_HYCOM = temp_HMON_HYCOM[oklat, oklon, :] #min_valt = 4 #max_valt = 27 nlevelst = tempt_lim_MAB[1] - tempt_lim_MAB[0] + 1 kw = dict(levels=np.linspace(tempt_lim_MAB[0], tempt_lim_MAB[1], nlevelst)) fig, ax = plt.subplots(figsize=(9, 3)) plt.contourf(dist, -depth_HYCOM, trans_temp_HYCOM.T, 
cmap=cmocean.cm.thermal, **kw) cbar = plt.colorbar() cbar.ax.tick_params(labelsize=16) plt.contour(dist, -depth_HYCOM, trans_temp_HYCOM.T, [26], color='k') cbar.ax.set_ylabel('($^\circ$C)', fontsize=14) cbar.ax.tick_params(labelsize=14) plt.ylabel('Depth (m)', fontsize=14) plt.xlabel('Along Transect Distance (km)', fontsize=14) plt.title('HMON-HYCOM Endurance Line ' + 'Storm ' + storm_id + ' Cycle ' + cycle + ' \n on ' + str(time_HYCOM[oktime])[0:13], fontsize=16) plt.ylim([-100, 0]) plt.xlim([0, 200]) file = folder_fig + 'HMON_HYCOM_temp_MAB_endurance_line_cycle_' + cycle plt.savefig(file, bbox_inches='tight', pad_inches=0.1) #%% Bathymetry GEBCO HYCOM domain kw = dict(levels=np.arange(-5000, 1, 200)) plt.figure() plt.contour(bath_lon, bath_lat, bath_elev, levels=[0], colors='k') plt.contourf(bath_lon, bath_lat, bath_elev, cmap=cmocean.cm.topo, **kw) plt.plot(X, Y, '-k') plt.colorbar() plt.axis('scaled') plt.title('GEBCO Bathymetry') plt.xlim(-76, -70) plt.ylim(35, 42) file = folder_fig + 'MAB_transect' plt.savefig(file, bbox_inches='tight', pad_inches=0.1) #%% x1 = -90 y1 = 20 + 52 / 60 x2 = -90 y2 = 30 Y = np.arange(y1, y2, 0.05) X = np.tile(x1, len(Y)) dist = np.sqrt((X - x1)**2 + (Y - y1)**2) * 111 # approx along transect distance in km oklon = np.round( np.interp(X, lon_hycom[0, :] - 360, np.arange(len(lon_hycom[0, :])))).astype(int) oklat = np.round( np.interp(Y, lat_hycom[:, 0], np.arange(len(lat_hycom[:, 0])))).astype(int) trans_temp_HYCOM = temp_HMON_HYCOM[oklat, oklon, :] #min_valt = 12 #max_valt = 32 nlevelst = tempt_lim_GoM[1] - tempt_lim_GoM[0] + 1 kw = dict(levels=np.linspace(tempt_lim_GoM[0], tempt_lim_GoM[1], nlevelst)) fig, ax = plt.subplots(figsize=(9, 3)) plt.contourf(dist, -depth_HYCOM, trans_temp_HYCOM.T, cmap=cmocean.cm.thermal, **kw) cbar = plt.colorbar() cbar.ax.tick_params(labelsize=16) plt.contour(dist, -depth_HYCOM, trans_temp_HYCOM.T, [26], color='k') cbar.ax.set_ylabel('($^\circ$C)', fontsize=14) cbar.ax.tick_params(labelsize=14) plt.ylabel('Depth (m)', fontsize=14) plt.xlabel('Along Transect Distance (km)', fontsize=14) plt.title('HMON-HYCOM Across GoMex ' + 'Storm ' + storm_id + ' Cycle ' + cycle + ' \n on ' + str(time_HYCOM[oktime])[0:13], fontsize=16) plt.ylim([-300, 0]) #plt.xlim([0,200]) file = folder_fig + 'HMON_HYCOM_temp_GoMex_across_cycle_' + cycle plt.savefig(file, bbox_inches='tight', pad_inches=0.1) #%% Bathymetry GEBCO HYCOM domain kw = dict(levels=np.arange(-5000, 1, 200)) plt.figure() plt.contour(bath_lon, bath_lat, bath_elev, levels=[0], colors='k') plt.contourf(bath_lon, bath_lat, bath_elev, cmap=cmocean.cm.topo, **kw) plt.plot(X, Y, '-k') plt.colorbar() plt.axis('scaled') plt.title('GEBCO Bathymetry') plt.xlim(-98, -80) plt.ylim(18, 32) file = folder_fig + 'GoMex_transect' plt.savefig(file, bbox_inches='tight', pad_inches=0.1)
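# Two notes on the transect code above: plt.contour expects colors='k'
# (plural); the singular color= kwarg is not a contour parameter and is
# rejected or ignored depending on the matplotlib version. The
# lon/lat-to-grid-index trick interpolates index positions and rounds to the
# nearest column; a self-contained sketch on a synthetic monotonic grid:
import numpy as np

grid_lon = np.linspace(-98.0, -60.0, 381)      # model longitudes (monotonic)
X = np.array([-74.1, -73.55, -73.0])           # transect longitudes
oklon = np.round(np.interp(X, grid_lon,
                           np.arange(len(grid_lon)))).astype(int)
print(grid_lon[oklon])                         # nearest grid column per target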
import datetime as DT
from matplotlib import pyplot as plt
from matplotlib.dates import date2num

data = [(DT.datetime.strptime('2016-10-03', "%Y-%m-%d"), 772.559998),
        (DT.datetime.strptime('2016-10-04', "%Y-%m-%d"), 776.429993),
        (DT.datetime.strptime('2016-10-05', "%Y-%m-%d"), 776.469971),
        (DT.datetime.strptime('2016-10-06', "%Y-%m-%d"), 776.859985),
        (DT.datetime.strptime('2016-10-07', "%Y-%m-%d"), 775.080017)]

x = [date2num(date) for (date, value) in data]
y = [value for (date, value) in data]

fig = plt.figure()
graph = fig.add_subplot(111)

graph.plot(x, y, 'r-o')
graph.set_xticks(x)
graph.set_xticklabels([date.strftime("%Y-%m-%d") for (date, value) in data])

plt.xlabel('Date')
plt.ylabel('Closing Value')
plt.title('Closing stock value of Alphabet Inc.')
plt.grid(linestyle='-', linewidth='0.5', color='blue')
plt.show()
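# The manual date2num / set_xticklabels round-trip above works, but matplotlib
# consumes datetime objects directly and can format the axis itself; a minimal
# sketch of the same plot using a DateFormatter:
import matplotlib.dates as mdates

dates_ = [DT.datetime(2016, 10, d) for d in (3, 4, 5, 6, 7)]
values = [772.56, 776.43, 776.47, 776.86, 775.08]

fig, ax = plt.subplots()
ax.plot(dates_, values, 'r-o')
ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
fig.autofmt_xdate()
plt.show()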
import datetime as dt
import matplotlib.dates as md


def getTimestamp(fname):
    # The second dot-separated field of the filename is assumed to hold an
    # epoch time in tenths of a second; convert it to a matplotlib date.
    num = float(fname.split(".")[1]) / 10.0
    return md.date2num(dt.datetime.fromtimestamp(num))
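# Usage sketch with a hypothetical filename of the assumed
# "<prefix>.<epoch-tenths>.<ext>" form:
print(getTimestamp("capture.14757408000.dat"))  # matplotlib date, ~2016-10-06 local time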
timeseries_data = []  # [interval midpoint, mass conc, number conc] per row
for row in data:
    interval_start = row[0]
    interval_end = row[1]
    interval_mid = interval_start + (interval_end - interval_start) / 2
    fraction_of_mass_sampled = np.float32(row[6])
    fraction_of_mass_sampled_err = np.float32(row[7])
    mass_conc = np.float32(row[2]) / (np.float32(row[5]) *
                                      fraction_of_mass_sampled)
    mass_conc_err = np.float32(row[3]) / (np.float32(row[5]) *
                                          fraction_of_mass_sampled)
    numb_conc = np.float32(row[4]) / (np.float32(row[5]))
    timeseries_data.append([interval_mid, mass_conc, numb_conc])

time = [
    dates.date2num(datetime.utcfromtimestamp(row[0]))
    for row in timeseries_data
]
mass_concs = [row[1] for row in timeseries_data]
numb_concs = [row[2] for row in timeseries_data]

# plotting
hfmt = dates.DateFormatter('%Y%m%d %H:%M')
fig = plt.figure(figsize=(12, 10))
ax1 = plt.subplot2grid((2, 1), (0, 0))
ax2 = plt.subplot2grid((2, 1), (1, 0))

ax1.scatter(time, mass_concs, color='b', marker='o')
ax1.xaxis.set_major_formatter(hfmt)
ax1.set_ylabel('rBC mass concentration (ng/m3)')
def main(): var_name_liquid = "I1" var_name_solid = "I2" #peirod of interest start_year = 1979 end_year = 1988 #spatial averaging will be done over upstream points to the stations selected_ids = [ "092715", "080101", "074903", "050304", "080104", "081007", "061905", "041903", "040830", "093806", "090613", "081002", "093801", "080718" ] selected_ids = ["074903"] #simulation names corresponding to the paths sim_names = ["crcm5-hcd-rl", "crcm5-hcd-rl-intfl"] sim_labels = [x.upper() for x in sim_names] colors = ["blue", "violet"] layer_widths = [ 0.1, 0.2, 0.3, 0.4, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 1.0, 3.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0 ] layer_depths = np.cumsum(layer_widths) paths = [ "/home/huziy/skynet3_rech1/from_guillimin/new_outputs/quebec_0.1_crcm5-hcd-rl_spinup", "/home/huziy/skynet3_rech1/from_guillimin/new_outputs/quebec_0.1_crcm5-hcd-rl-intfl_spinup2/Samples_all_in_one" ] seasons = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]] season_names = ["DJF", "MAM", "JJA", "SON"] managers = [ Crcm5ModelDataManager(samples_folder_path=path, file_name_prefix="pm", all_files_in_samples_folder=True, need_cell_manager=(i == 0)) for i, path in enumerate(paths) ] #share the cell manager a_data_manager = managers[0] assert isinstance(a_data_manager, Crcm5ModelDataManager) cell_manager = a_data_manager.cell_manager assert isinstance(cell_manager, CellManager) for m in managers[1:]: assert isinstance(m, Crcm5ModelDataManager) m.cell_manager = cell_manager #share the lake fraction field lake_fraction = a_data_manager.lake_fraction #selected_ids = ["092715", "080101", "074903", "050304", "080104", "081007", "061905", # "041903", "040830", "093806", "090613", "081002", "093801", "080718"] start_date = datetime(start_year, 1, 1) end_date = datetime(end_year, 12, 31) stations = cehq_station.read_station_data(selected_ids=selected_ids, start_date=start_date, end_date=end_date) #stations with corresponding model points station_to_mp = a_data_manager.get_dataless_model_points_for_stations( stations) #figure out levels in soil sim_label_to_profiles = {} for s, mp in station_to_mp.items(): assert isinstance(mp, ModelPoint) mask = (mp.flow_in_mask == 1) & (lake_fraction < 0.6) fig = plt.figure() fmt = ScalarFormatter(useMathText=True) fmt.set_powerlimits([-2, 2]) print(mp.ix, mp.jy, s.id) for m, label, color in zip(managers, sim_labels, colors): assert isinstance(m, Crcm5ModelDataManager) monthly_means_liquid = _get_cached_monthly_mean_fields( label, start_year, end_year, var_name_liquid) if monthly_means_liquid is None: monthly_means_liquid = m.get_monthly_climatology_of_3d_field( var_name=var_name_liquid, start_year=start_year, end_year=end_year) _cache_monthly_mean_fields(monthly_means_liquid, label, start_year, end_year, var_name_liquid) monthly_means_solid = _get_cached_monthly_mean_fields( label, start_year, end_year, var_name_solid) if monthly_means_solid is None: monthly_means_solid = m.get_monthly_climatology_of_3d_field( var_name=var_name_solid, start_year=start_year, end_year=end_year) _cache_monthly_mean_fields(monthly_means_solid, label, start_year, end_year, var_name_solid) profiles = [ monthly_means_liquid[i][mask, :].mean(axis=0) + monthly_means_solid[i][mask, :].mean(axis=0) for i in range(12) ] sim_label_to_profiles[label] = np.array(profiles) x = [date2num(datetime(2001, month, 1)) for month in range(1, 13)] y = layer_depths y2d, x2d = np.meshgrid(y, x) delta = (sim_label_to_profiles[sim_labels[1]] - sim_label_to_profiles[sim_labels[0]] ) / 
sim_label_to_profiles[sim_labels[0]] * 100 #delta = np.ma.masked_where(delta < 0.1, delta) cmap = my_colormaps.get_cmap_from_ncl_spec_file( path="colormap_files/BlueRed.rgb", ncolors=10) the_min = -6.0 the_max = 6.0 step = (the_max - the_min) / float(cmap.N) plt.pcolormesh(x2d[:, :8], y2d[:, :8], delta[:, :8], cmap=cmap, vmin=the_min, vmax=the_max) #, levels = np.arange(-6,7,1)) plt.gca().invert_yaxis() plt.colorbar(ticks=np.arange(the_min, the_max + step, step)) plt.gca().set_ylabel("Depth (m)") plt.gca().xaxis.set_major_formatter(DateFormatter("%b")) #fig.tight_layout() fig.savefig("soil_profile_upstream_of_{0}.pdf".format(s.id)) pass
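# The month axis above pins each climatological month to an arbitrary
# reference year (2001) so DateFormatter('%b') can label it; a minimal
# standalone sketch of the trick:
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.dates import date2num, DateFormatter
from datetime import datetime

x = [date2num(datetime(2001, month, 1)) for month in range(1, 13)]
y = np.random.rand(12)

fig, ax = plt.subplots()
ax.plot(x, y, 'o-')
ax.xaxis.set_major_formatter(DateFormatter('%b'))  # Jan, Feb, ...
plt.show()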
def HWRF_POM_cross_transect(storm_id, cycle, tempt_lim_MAB, tempt_lim_GoM): #%% from matplotlib import pyplot as plt import numpy as np import xarray as xr from matplotlib.dates import date2num, num2date from datetime import datetime, timedelta import os import os.path import glob import cmocean # Increase fontsize of labels globally plt.rc('xtick', labelsize=14) plt.rc('ytick', labelsize=14) plt.rc('legend', fontsize=14) #%% ti = datetime.today() ffig = '/home/Maria.Aristizabal/Figures/' + str( ti.year) + '/' + ti.strftime('%b-%d') folder_fig = ffig + '/' + storm_id + '_' + cycle + '/' os.system('mkdir ' + ffig) os.system('mkdir ' + folder_fig) #%% Bathymetry file bath_file = '/scratch2/NOS/nosofs/Maria.Aristizabal/bathymetry_files/GEBCO_2014_2D_-100.0_0.0_-10.0_70.0.nc' #%% Reading bathymetry data ncbath = xr.open_dataset(bath_file) bath_lat = ncbath.variables['lat'][:] bath_lon = ncbath.variables['lon'][:] bath_elev = ncbath.variables['elevation'][:] #%% folder and file names ti = datetime.today() - timedelta(1) folder_hwrf_pom = '/scratch2/NOS/nosofs/Maria.Aristizabal/HWRF_POM_' + storm_id + '_' + str( ti.year) + '/' + 'HWRF_POM_' + storm_id + '_' + cycle + '/' #%% Reading POM grid file grid_file = sorted(glob.glob(os.path.join(folder_hwrf_pom, '*grid*.nc')))[0] pom_grid = xr.open_dataset(grid_file) lon_pom = np.asarray(pom_grid['east_e'][:]) lat_pom = np.asarray(pom_grid['north_e'][:]) zlevc = np.asarray(pom_grid['zz'][:]) topoz = np.asarray(pom_grid['h'][:]) #%% Getting list of POM files ncfiles = sorted(glob.glob(os.path.join(folder_hwrf_pom, '*pom.0*.nc'))) # Reading POM time time_pom = [] for i, file in enumerate(ncfiles): print(i) pom = xr.open_dataset(file) tpom = pom['time'][:] timestamp_pom = date2num(tpom)[0] time_pom.append(num2date(timestamp_pom)) time_POM = np.asarray(time_pom) oktime_POM = np.where(time_POM == time_POM[1])[0][0] #second file #%% Figure temp transect along Endurance line x1 = -74.1 y1 = 39.4 x2 = -73.0 y2 = 38.6 # Slope m = (y1 - y2) / (x1 - x2) # Intercept b = y1 - m * x1 X = np.arange(x1, -72, 0.05) Y = b + m * X dist = np.sqrt((X - x1)**2 + (Y - y1)**2) * 111 # approx along transect distance in km oklon = np.round(np.interp(X, lon_pom[0, :], np.arange(len(lon_pom[0, :])))).astype(int) oklat = np.round(np.interp(Y, lat_pom[:, 0], np.arange(len(lat_pom[:, 0])))).astype(int) topoz_pom = np.asarray(topoz[oklat, oklon]) zmatrix_POM = np.dot(topoz_pom.reshape(-1, 1), zlevc.reshape(1, -1)).T dist_matrix = np.tile(dist, (zmatrix_POM.shape[0], 1)) trans_temp_POM = np.empty((zmatrix_POM.shape[0], zmatrix_POM.shape[1])) trans_temp_POM[:] = np.nan pom = xr.open_dataset(ncfiles[oktime_POM]) for x in np.arange(len(X)): print(x) trans_temp_POM[:, x] = np.asarray(pom['t'][0, :, oklat[x], oklon[x]]) #min_valt = 4 #max_valt = 27 nlevelst = tempt_lim_MAB[1] - tempt_lim_MAB[0] + 1 kw = dict(levels=np.linspace(tempt_lim_MAB[0], tempt_lim_MAB[1], nlevelst)) fig, ax = plt.subplots(figsize=(9, 5)) plt.contourf(dist_matrix, zmatrix_POM, trans_temp_POM, cmap=cmocean.cm.thermal, **kw) cbar = plt.colorbar() cbar.ax.tick_params(labelsize=16) plt.contour(dist_matrix, zmatrix_POM, trans_temp_POM, [26], color='k') cbar.ax.set_ylabel('($^\circ$C)', fontsize=14) cbar.ax.tick_params(labelsize=14) plt.title('HWRF-POM Endurance Line MAB ' + 'Storm ' + storm_id + ' Cycle ' + cycle + ' \n on ' + str(time_POM[oktime_POM])[0:13], fontsize=16) plt.ylim([-100, 0]) plt.xlim([0, 200]) ax.set_ylabel('Depth (m)', fontsize=14) ax.set_xlabel('Along Transect Distance (km)', fontsize=14) file = 
folder_fig + 'HWRF_POM_temp_MAB_endurance_line_cycle_' + cycle plt.savefig(file, bbox_inches='tight', pad_inches=0.1) #%% Bathymetry GEBCO HYCOM domain kw = dict(levels=np.arange(-5000, 1, 200)) plt.figure() plt.contour(bath_lon, bath_lat, bath_elev, levels=[0], colors='k') plt.contourf(bath_lon, bath_lat, bath_elev, cmap=cmocean.cm.topo, **kw) plt.plot(X, Y, '-k') plt.colorbar() plt.axis('scaled') plt.title('GEBCO Bathymetry') plt.xlim(-76, -70) plt.ylim(35, 42) file = folder_fig + 'MAB_transect' plt.savefig(file, bbox_inches='tight', pad_inches=0.1) #%% Figure temp transect across GoMex x1 = -90 y1 = 20 + 52 / 60 x2 = -90 y2 = 30 Y = np.arange(y1, y2, 0.05) X = np.tile(x1, len(Y)) dist = np.sqrt((X - x1)**2 + (Y - y1)**2) * 111 # approx along transect distance in km oklon = np.round(np.interp(X, lon_pom[0, :], np.arange(len(lon_pom[0, :])))).astype(int) oklat = np.round(np.interp(Y, lat_pom[:, 0], np.arange(len(lat_pom[:, 0])))).astype(int) topoz_pom = np.asarray(topoz[oklat, oklon]) zmatrix_POM = np.dot(topoz_pom.reshape(-1, 1), zlevc.reshape(1, -1)).T dist_matrix = np.tile(dist, (zmatrix_POM.shape[0], 1)) trans_temp_POM = np.empty((zmatrix_POM.shape[0], zmatrix_POM.shape[1])) trans_temp_POM[:] = np.nan pom = xr.open_dataset(ncfiles[oktime_POM]) for x in np.arange(len(X)): print(x) trans_temp_POM[:, x] = np.asarray(pom['t'][0, :, oklat[x], oklon[x]]) #min_valt = 12 #max_valt = 32 nlevelst = tempt_lim_GoM[1] - tempt_lim_GoM[0] + 1 kw = dict(levels=np.linspace(tempt_lim_GoM[0], tempt_lim_GoM[1], nlevelst)) fig, ax = plt.subplots(figsize=(9, 3)) plt.contourf(dist_matrix, zmatrix_POM, trans_temp_POM, cmap=cmocean.cm.thermal, **kw) cbar = plt.colorbar() cbar.ax.tick_params(labelsize=16) plt.contour(dist_matrix, zmatrix_POM, trans_temp_POM, [26], color='k') cbar.ax.set_ylabel('($^\circ$C)', fontsize=14) cbar.ax.tick_params(labelsize=14) plt.title('HWRF-POM Across GoMex ' + 'Storm ' + storm_id + ' Cycle ' + ' \n on ' + str(time_POM[oktime_POM])[0:13], fontsize=16) plt.ylim([-300, 0]) #plt.xlim([0,200]) ax.set_ylabel('Depth (m)', fontsize=14) ax.set_xlabel('Along Transect Distance (km)', fontsize=14) file = folder_fig + 'HWRF_POM_temp_GoMex_across_cycle_' + cycle plt.savefig(file, bbox_inches='tight', pad_inches=0.1) #%% Bathymetry GEBCO HYCOM domain kw = dict(levels=np.arange(-5000, 1, 200)) plt.figure() plt.contour(bath_lon, bath_lat, bath_elev, levels=[0], colors='k') plt.contourf(bath_lon, bath_lat, bath_elev, cmap=cmocean.cm.topo, **kw) plt.plot(X, Y, '-k') plt.colorbar() plt.axis('scaled') plt.title('GEBCO Bathymetry') plt.xlim(-98, -80) plt.ylim(18, 32) file = folder_fig + 'GoMex_transect' plt.savefig(file, bbox_inches='tight', pad_inches=0.1)
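# Notes on the POM transects above: plt.contour wants colors='k' (the singular
# color= kwarg is not a contour parameter), and the GoMex title concatenates
# ' Cycle ' without the cycle value. The zmatrix itself is just the outer
# product of column depth h with the sigma levels zz; a tiny synthetic sketch:
import numpy as np

topoz_pom = np.array([50.0, 120.0, 300.0])      # water depth h per column
zlevc = np.array([-0.05, -0.25, -0.60, -0.95])  # sigma levels zz (negative up)
zmatrix_POM = np.dot(topoz_pom.reshape(-1, 1), zlevc.reshape(1, -1)).T
print(zmatrix_POM.shape)                        # (levels, columns)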
# Set the initial K-line (candlestick) variables
STime = TimetoNumber('08450000')
# Set the K-line period
Cycle = 6000
OHLC = []
lastAmount = 0
# Build the per-minute OHLC bars
for i in I020:
    time = TimetoNumber(i[0])
    price = int(i[4])
    amount = int(i[6])
    if len(OHLC) == 0:
        OHLC += [[
            mdates.date2num(
                datetime.datetime.strptime(NumbertoTime(STime + Cycle),
                                           "%H%M%S")), price, price, price,
            price, 0
        ]]
    if time < STime + Cycle:
        if price > OHLC[-1][2]:
            OHLC[-1][2] = price
        if price < OHLC[-1][3]:
            OHLC[-1][3] = price
        OHLC[-1][4] = price
    else:
        OHLC[-1][5] = amount - lastAmount
        lastAmount = amount
        STime += Cycle
        OHLC += [[
            mdates.date2num(
                datetime.datetime.strptime(NumbertoTime(STime + Cycle),
                                           "%H%M%S")), price, price, price,
            price, 0
        ]]
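# If the raw ticks are loaded into a pandas DataFrame indexed by time, the
# hand-rolled bar builder above collapses to a resample; a sketch assuming
# pandas is available (synthetic ticks, 1-minute bars):
import numpy as np
import pandas as pd

idx = pd.date_range('2021-01-04 08:45', periods=600, freq='s')
ticks = pd.DataFrame({'price': np.random.rand(600).cumsum() + 9000,
                      'qty': np.random.randint(1, 5, 600)}, index=idx)
bars = ticks['price'].resample('1min').ohlc()
bars['volume'] = ticks['qty'].resample('1min').sum()
print(bars.head())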
    176758,
    '2015-03-04', 825, 176958,
    '2015-04-04', 830, 176926,
]

# Extract the dates from the data array
date_strings = data[0::3]

# Convert date strings into numbers
date_nums = []
for d in date_strings:
    date_nums.append(date2num(datetime.strptime(d, '%Y-%m-%d')))

# Extract number of files from data array
n_files = data[1::3]

# Extract number of lines of code from data array
n_lines = data[2::3]

# Get a reference to the figure
fig = plt.figure()

# 111 is equivalent to Matlab's subplot(1,1,1) command
ax1 = fig.add_subplot(111)
ax1.plot(date_nums, n_files, 'bo-')
ax1.set_ylabel('Files (blue circles)')
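# The ax1 y-label advertises "blue circles" for files, which suggests the
# lines-of-code series was meant to share the x-axis on a second y-axis; a
# hedged continuation using twinx():
ax2 = ax1.twinx()
ax2.plot(date_nums, n_lines, 'rs-')
ax2.set_ylabel('Lines of code (red squares)')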
# Folder name for the cycle date (strftime zero-pads both month and day)
fol = 'rtofs.' + tini.strftime('%Y%m%d')

ncRTOFS = xr.open_dataset(folder_RTOFS + fol + '/' + nc_files_RTOFS[0])
latRTOFS = np.asarray(ncRTOFS.Latitude[:])
lonRTOFS = np.asarray(ncRTOFS.Longitude[:])
depthRTOFS = np.asarray(ncRTOFS.Depth[:])

tRTOFS = []
for t in np.arange(len(nc_files_RTOFS)):
    ncRTOFS = xr.open_dataset(folder_RTOFS + fol + '/' + nc_files_RTOFS[t])
    tRTOFS.append(np.asarray(ncRTOFS.MT[:])[0])

tRTOFS = np.asarray([mdates.num2date(mdates.date2num(tRTOFS[t]))
                     for t in np.arange(len(nc_files_RTOFS))])

#%% Downloading and reading Copernicus grid
COP_grid = xr.open_dataset(ncCOP_global)

latCOP_glob = COP_grid.latitude[:]
lonCOP_glob = COP_grid.longitude[:]

#%% Reading bathymetry data
ncbath = xr.open_dataset(bath_file)
bath_lat = ncbath.variables['lat'][:]
bath_lon = ncbath.variables['lon'][:]
bath_elev = ncbath.variables['elevation'][:]

#%%
         label='Average Price', linestyle='--')
plt.title('Stock of Moving Average Envelopes')
plt.ylabel('Price')
plt.xlabel('Date')
plt.legend(loc='best')
plt.show()

# ## Candlestick with MAE
from matplotlib import dates as mdates

dfc = df.copy()
dfc['VolumePositive'] = dfc['Open'] < dfc['Adj Close']
#dfc = dfc.dropna()
dfc = dfc.reset_index()
dfc['Date'] = mdates.date2num(dfc['Date'].tolist())

from mplfinance.original_flavor import candlestick_ohlc

fig = plt.figure(figsize=(14, 7))
ax1 = plt.subplot(111)
candlestick_ohlc(ax1, dfc.values, width=0.5, colorup='g', colordown='r',
                 alpha=1.0)
ax1.plot(df['Upper_Envelope'], color='blue')
ax1.plot(df['Lower_Envelope'], color='red')
ax1.plot(df['Adj Close'].rolling(20).mean(), color='orange')
ax1.xaxis_date()
ax1.xaxis.set_major_formatter(mdates.DateFormatter('%d-%m-%Y'))
""" ============== Load Converter ============== """ import dateutil.parser from matplotlib import cbook, dates import matplotlib.pyplot as plt import numpy as np datafile = cbook.get_sample_data('msft.csv', asfileobj=False) print('loading', datafile) data = np.genfromtxt( datafile, delimiter=',', names=True, converters={0: lambda s: dates.date2num(dateutil.parser.parse(s))}) fig, ax = plt.subplots() ax.plot_date(data['Date'], data['High'], '-') fig.autofmt_xdate() plt.show()
def plot_tog(): fig = plt.figure(figsize=(7, 10)) gss = gridspec.GridSpec(nrows=3, ncols=1, height_ratios=[1, 2,2]) ax1 = plt.subplot(gss[0]) ax2 = plt.subplot(gss[1]) ax3 = plt.subplot(gss[2], sharex=ax2) ax1.plot(gl, color='r', label='GOES 1-8$\mathrm{\AA}$') ax1.plot(gs, color='b', label='GOES 1-8$\mathrm{\AA}$') ax1.set_yscale('log') ax1.set_xlim(flare_ts, flare_te) ax1.xaxis.set_minor_locator(dates.SecondLocator(interval=10)) ax1.xaxis.set_major_locator(dates.MinuteLocator(interval=1)) ax1.legend(loc='upper right') ax1.tick_params(labelbottom=False, which='both', direction='in') ax1.set_ylabel('Flux (Wm$^{-2}$)') ax1.axvline(pul_ts, color='k') ax1.axvline(pul_te, color='k') ax2.plot(norp17, label='NoRP 17GHz', color='darkred', drawstyle='steps-mid') ax2.plot(norp9, label='NoRP 9GHz', color='darkblue', drawstyle='steps-mid') ax2.legend(loc='upper right') ax2.set_ylabel('Flux (SFU)') ax2.set_ylim(0, 200) x1 = dates.date2num(datetime.datetime.strptime(pul_ts, '%Y-%m-%d %H:%M:%S')) # start time x2 = dates.date2num(datetime.datetime.strptime(pul_te, '%Y-%m-%d %H:%M:%S')) # end time xyA = (x1, -0.01) xyB = (0.0, 1) # x and y in axes coordinates coordsA = ax1.get_xaxis_transform() # x in data coords, y in axes coords coordsB = "axes fraction" con_start = ConnectionPatch(xyA=xyA, xyB=xyB, coordsA=coordsA, coordsB=coordsB, axesA=ax1, axesB=ax2, arrowstyle="-") xyC = (x2, -0.01) xyD = (1, 1) # x and y in axes coordinates coordsC = ax1.get_xaxis_transform() # x in data coords, y in axes coords coordsD = "axes fraction" con_end = ConnectionPatch(xyA=xyC, xyB=xyD, coordsA=coordsC, coordsB=coordsD, axesA=ax1, axesB=ax2, arrowstyle="-") ax2.add_artist(con_start) ax2.add_artist(con_end) #ax3 = ax2.twinx() ax3.plot(rhessi_2535, drawstyle='steps-mid', label='RHESSI 25-35keV', color='grey', lw=0.8) ax3.plot(rhessi_35100, drawstyle='steps-mid', label='RHESSI 35-100keV', color='k', lw=0.8) ax3.legend(loc = 'upper right') ax3.set_ylabel('Counts det$^{-1}$ s$^{-1}$') ax3.set_xlabel('Time (UT) 2014-06-11') ax3.set_xlim(pul_ts, pul_te) ax2.xaxis.set_minor_locator(dates.SecondLocator(interval=10)) ax2.xaxis.set_major_locator(dates.MinuteLocator(interval=1)) ax2.xaxis.set_major_formatter(dates.DateFormatter('%H:%M:%S')) ax2.tick_params(which='both', direction='in', labelbottom=False) #ax3.xaxis.set_tick_params(rotation=45, which='both', direction='in') ax3.xaxis.set_tick_params(which='both', direction='in') plt.tight_layout() plt.subplots_adjust(hspace=0.1) plt.savefig('overview_test.png', dpi=200) plt.close()
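# The ConnectionPatch pairs above draw guide lines from the highlighted span
# in ax1 down to the full width of ax2. A self-contained sketch of the same
# idea with synthetic data:
import matplotlib.pyplot as plt
from matplotlib.patches import ConnectionPatch

fig, (top, bottom) = plt.subplots(2, 1)
top.plot(range(100))
bottom.plot(range(30, 60))
con = ConnectionPatch(xyA=(30, -0.01), xyB=(0.0, 1.0),
                      coordsA=top.get_xaxis_transform(),  # x data, y axes
                      coordsB='axes fraction',
                      axesA=top, axesB=bottom, arrowstyle='-')
bottom.add_artist(con)
plt.show()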
# Scan the ticks and record large (inner/outer-lot) trades
for i in range(1, len(Data)):
    time = datetime.datetime.strptime(Data[i][0], "%H%M%S%f")
    price = float(Data[i][2])
    qty = int(Data[i][3])
    value = price * 1000 * qty
    if value > BigValue:
        BigTrade.append([time, price])
# print(BigTrade)

# Start plotting
# Parse the time strings into datetime objects
Time = [datetime.datetime.strptime(line[0], "%H%M%S%f") for line in Data]
# Convert the datetime objects to matplotlib's plotting dates via
# mdates.date2num
Time1 = [mdates.date2num(line) for line in Time]
# Convert the prices from strings to floats
Price = [float(line[2]) for line in Data]
# Create the axes object
ax = plt.subplot(111)
# Set the title
plt.title('Price&BigTrade Line')
# Draw the price line chart
ax.plot_date(Time1, Price, 'k-')

if len(BigTrade) != 0:
    # Get the times of the large trades (already datetime objects)
    STime = [line[0] for line in BigTrade]
def MpfPlotWave(ticker): # assert ticker is not empty if len(ticker) == 0: return start = datetime(2019, 2, 9) end = datetime(2020, 2, 11) # values that can be parameterized intEmaPeriod = 34 intBars = 90 row = 0 for i in ticker: ohlc = get_historical_data(i, start, end, output_format='pandas', token=jsnIEX['iextoken']) ohlc.columns = ["Open", "High", "Low", "Close", "Volume"] if (intBars + intEmaPeriod > len(ohlc)): print("Error: Bars + EmaPeriod exceeds ohlc " + str(len(ohlc) - intBars - intEmaPeriod)) return hEma = ohlc['High'].ewm(intEmaPeriod).mean() cEma = ohlc['Close'].ewm(intEmaPeriod).mean() lEma = ohlc['Low'].ewm(intEmaPeriod).mean() # extract OHLC into a list of lists lohlc = ohlc[['Open', 'High', 'Low', 'Close']].values.tolist() # convert dates in datetime format to mathplotlib dates mdates = dates.date2num(ohlc.index) # prepare ohlc in mathplotlib format mohlc = [[mdates[i]] + lohlc[i] for i in range(len(mdates))] # set default font sizes params = {'axes.labelsize': 20, 'axes.titlesize': 24} pyplot.rcParams.update(params) fig, ax = pyplot.subplots(figsize=(24, 24)) # set default tick sizes ax.tick_params(axis='both', which='major', labelsize=20) ax.tick_params(axis='both', which='minor', labelsize=18) # mpfold.plot_day_summary_ohlc(ax, mohlc[-50:], ticksize=5, colorup='#77d879', colordown='#db3f3f') # alternatively, use a barchart mpfold.candlestick_ohlc(ax, mohlc[-intBars:], width=0.4, colorup='#77d879', colordown='#db3f3f') ax.plot(hEma[-intBars:], color='red', linewidth=2, label='high, ' + str(intEmaPeriod) + '-Day EMA') ax.plot(cEma[-intBars:], color='green', linewidth=2, label='close, ' + str(intEmaPeriod) + '-Day EMA') ax.plot(lEma[-intBars:], color='blue', linewidth=2, label='low, ' + str(intEmaPeriod) + '-Day EMA') ax.set_xlabel('Date') ax.set_ylabel('Price') ax.set_title(i + ' Chart with ' + str(intEmaPeriod) + '-Day EMA Wave') ax.legend(fontsize=20) ax.xaxis.set_major_formatter(dates.DateFormatter('%b %d')) fig.autofmt_xdate()
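# The manual date2num / candlestick_ohlc dance above is what the newer
# mplfinance API does internally; a hedged sketch (assumes mplfinance >= 0.12
# is installed; synthetic OHLC data). Note mav= draws simple moving averages,
# not the EMA wave of the original:
import numpy as np
import pandas as pd
import mplfinance as mpf

idx = pd.date_range('2020-01-01', periods=90, freq='D')
close = np.random.rand(90).cumsum() + 100
ohlc = pd.DataFrame({'Open': close + 0.5, 'High': close + 1.0,
                     'Low': close - 1.0, 'Close': close}, index=idx)
mpf.plot(ohlc, type='candle', mav=(34,))   # candles with a 34-bar MA overlay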
        okd = np.logical_and(depthg_gridded >= np.min(depthf[okt]),
                             depthg_gridded < np.max(depthf[okt]))
        tempg_gridded[okd, t] = np.interp(depthg_gridded[okd], depthf[okt],
                                          tempf[okt])

    oks = np.isfinite(saltf)
    if np.sum(oks) < 3:
        saltg_gridded[:, t] = np.nan
    else:
        okd = np.logical_and(depthg_gridded >= np.min(depthf[oks]),
                             depthg_gridded < np.max(depthf[oks]))
        saltg_gridded[okd, t] = np.interp(depthg_gridded[okd], depthf[oks],
                                          saltf[oks])

# Narrowing time window of Doppio to coincide with glider time window
tmin = mdates.num2date(mdates.date2num(timeg[0]))
tmax = mdates.num2date(mdates.date2num(timeg[-1]))

# Changing times to timestamp
ttdoppio = [
    datetime(tdoppio[i].year, tdoppio[i].month, tdoppio[i].day,
             tdoppio[i].hour) for i in np.arange(len(tdoppio))
]
ttstamp_doppio = [
    mdates.date2num(ttdoppio[i]) for i in np.arange(len(ttdoppio))
]
tstamp_glider = [
    mdates.date2num(timeg[i]) for i in np.arange(len(timeg))
]

# oktime_doppio = np.where(np.logical_and(tdoppio >= tmin,tdoppio <= tmax))
def plot3Axes(self, figNum, time, data, title='', units='', xlabel='Time (s)', options='', xlim=None, ylim=None): fig = plt.figure(figNum) # plt.cla() if title: self.title = title if units: self.units = units if self.preTitle: fig.canvas.set_window_title("Figure %d - %s" % (figNum, self.preTitle)) if not figNum in self.sharex.keys(): self.sharex[figNum] = plt.subplot(3, 1, 1) # plt.plot(time, data[:,0], options) # Plot 1 subplt = plt.subplot(3, 1, 1, sharex=self.sharex[figNum]) # plt.hold(True); plt.grid(True) plt.title("%s" % (self.title)) plt.ylabel('(%s)' % (self.units)) plt.xlabel(xlabel) plt.margins(0.04) if xlim: subplt.set_xlim(xlim) if ylim: subplt.set_ylim(ylim) if self.timeIsUtc: dates = [dt.datetime.fromtimestamp(ts) for ts in time] datenums = md.date2num(dates) # plt.subplots_adjust(bottom=0.2) plt.xticks(rotation=25) ax = plt.gca() if self.timeIsUtc == 2: xfmt = md.DateFormatter('%H:%M:%S.%f') else: xfmt = md.DateFormatter('%H:%M:%S') ax.xaxis.set_major_formatter(xfmt) plt.plot(datenums, data[:, 0], options) else: plt.plot(time, data[:, 0], options) # Plot 2 subplt = plt.subplot(3, 1, 2, sharex=self.sharex[figNum]) # plt.hold(True); plt.grid(True) plt.ylabel('(%s)' % (self.units)) plt.xlabel(xlabel) plt.margins(0.04) if xlim: subplt.set_xlim(xlim) if ylim: subplt.set_ylim(ylim) if self.timeIsUtc: dates = [dt.datetime.fromtimestamp(ts) for ts in time] datenums = md.date2num(dates) # plt.subplots_adjust(bottom=0.2) plt.xticks(rotation=25) ax = plt.gca() if self.timeIsUtc == 2: xfmt = md.DateFormatter('%H:%M:%S.%f') else: xfmt = md.DateFormatter('%H:%M:%S') ax.xaxis.set_major_formatter(xfmt) plt.plot(datenums, data[:, 1], options) else: plt.plot(time, data[:, 1], options) # Plot 3 subplt = plt.subplot(3, 1, 3, sharex=self.sharex[figNum]) # plt.hold(True); plt.grid(True) plt.ylabel('(%s)' % (self.units)) plt.xlabel(xlabel) plt.margins(0.04) if xlim: subplt.set_xlim(xlim) if ylim: subplt.set_ylim(ylim) if self.timeIsUtc: dates = [dt.datetime.fromtimestamp(ts) for ts in time] datenums = md.date2num(dates) # plt.subplots_adjust(bottom=0.2) plt.xticks(rotation=25) ax = plt.gca() if self.timeIsUtc == 2: xfmt = md.DateFormatter('%H:%M:%S.%f') else: xfmt = md.DateFormatter('%H:%M:%S') ax.xaxis.set_major_formatter(xfmt) plt.plot(datenums, data[:, 2], options) else: plt.plot(time, data[:, 2], options) # legend(['desire','actual','e/10','e2/10']) return fig
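# plot3Axes() repeats the same UTC-axis boilerplate in all three subplots. A
# hedged refactor sketch that hoists it into a helper (names are illustrative,
# not part of the original class):
import datetime as dt
import matplotlib.dates as md


def plot_utc_series(ax, time, series, options='-', with_usec=False):
    # Convert UNIX timestamps to matplotlib dates and plot with an
    # HH:MM:SS (optionally .%f) formatted, rotated time axis.
    datenums = md.date2num([dt.datetime.fromtimestamp(ts) for ts in time])
    fmt = '%H:%M:%S.%f' if with_usec else '%H:%M:%S'
    ax.xaxis.set_major_formatter(md.DateFormatter(fmt))
    ax.tick_params(axis='x', labelrotation=25)
    ax.plot(datenums, series, options)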
def __init__(self, master): # load data datetime_list, barpress_list = [], [] datetime_re = re.compile(r'[\d]{2,4}') # regex to get datetime info for year in range(2012, 2016): file = Path(f'{DATA_FOLDER}/Environmental_Data_Deep_Moor_{year}.txt') print('Loading {0}'.format(file.name)) for row in DictReader(file.open('r'), delimiter='\t'): barpress_list.append(float(row['Barometric_Press'])) datetime_list.append(date2num( datetime(*list(map(int, datetime_re.findall(row['date time '])))))) self.datetime_array = np.array(datetime_list) self.barpress_array = np.array(barpress_list) # build the gui master.title('Weather Statistics') master.resizable(True, True) # maximize Tkinter windows # ref: https://stackoverflow.com/questions/15981000/tkinter-python-maximize-window try: master.state('zoomed') except (TclError): size = master.maxsize() master.geometry('{}x{}+0+0'.format(*size)) # draw the figure matplotlib.rc('font', size=18) figure = Figure() figure.set_facecolor((0, 0, 0, 0)) self.a = figure.add_subplot(111) self.canvas = FigureCanvasTkAgg(figure, master) self.canvas.draw() # add toolbar toolbar_frame = ttk.Frame(master) # needed to put navbar above plot toolbar = NavigationToolbar2Tk(self.canvas, toolbar_frame) toolbar.update() toolbar_frame.pack(side=TOP, fill=X, expand=0) self.canvas._tkcanvas.pack(fill=BOTH, expand=1) controls_frame = ttk.Frame(master) controls_frame.pack() ttk.Label(controls_frame, text='Start', font='Arial 18 bold').grid(row=0, column=0, pady=5) ttk.Label(controls_frame, text='(YYYY-MM-DD HH:MM:SS)', font='Courier 12').grid(row=1, column=0, padx=50, sticky='s') self.start = StringVar() ttk.Entry(controls_frame, width=19, textvariable=self.start, font='Courier 12').grid(row=2, column=0, sticky='n') self.start.set(str(num2date(self.datetime_array[0]))[0:19]) ttk.Label(controls_frame, text='End', font='Arial 18 bold').grid(row=0, column=1, pady=5) ttk.Label(controls_frame, text='(YYYY-MM-DD HH:MM:SS)', font='Courier 12').grid(row=1, column=1, padx=50, sticky='s') self.end = StringVar() ttk.Entry(controls_frame, width=19, textvariable=self.end, font='Courier 12').grid(row=2, column=1, sticky='n') self.end.set(str(num2date(self.datetime_array[-1]))[0:19]) # add button for update time range ttk.Button(controls_frame, text='Update', command=self._update).grid(row=3, column=0, columnspan=2, pady=10) ttk.Style().configure('TButton', font='Arial 18 bold') # call _update() to draw default figure self._update()
def reload_signals_from_timestamp(patientSigDF, timeStamp=None, deltaIdx=0): """ Reload signals from processed npy file corresponding to timepoint of interest (or file that is deltaIdx files ahead or beind timepoint of interest) timeStamp can be pandas Timestamp object, datetime object, 6-tuple datetimes or UTC time integer eg. 2:30:47 pm on 17 January 2015 in 6-tuple is: (2015, 1, 17, 14, 30, 47) patientSigDF is the patient specific dataframe that is generated undert the subjects.Patient class in ecogtools.recordingparams """ if timeStamp is None: loadIdx = 0 else: if isinstance(timeStamp, pd.tslib.Timestamp) or isinstance( timeStamp, datetime.datetime): timepoint = timeStamp elif isinstance(timeStamp, int) or isinstance( timeStamp, float) or isinstance(timeStamp, tuple): timepoint = utils.convert_timestamp([timeStamp])['datetime'] sigIdx_temp = np.where((patientSigDF['startT'] <= timepoint) & (patientSigDF['endT'] >= timepoint))[0] if len(sigIdx_temp) == 0: print('No data at time {0}'.format(timepoint)) loadIdx = None else: sigIdx = np.where((patientSigDF['startT'] <= timepoint) & (patientSigDF['endT'] >= timepoint))[0][0] loadIdx = sigIdx + deltaIdx tAxis = {} if loadIdx is not None and loadIdx <= len(patientSigDF['signalsFile']): signals = np.load(patientSigDF['signalsFile'][loadIdx], mmap_mode='r') tAxis['UTC'] = np.load(patientSigDF['signalsFile'][loadIdx].replace( '_signal.npy', '_time.npy'), mmap_mode='r') # Seconds since start of recording nTimePoints = len(tAxis['UTC']) #sampFq = round((nTimePoints)/(tAxis['UTC'][-1]-tAxis['UTC'][0])) sampFq = patientSigDF['sampFq'][loadIdx] nSec = nTimePoints / float(sampFq) tAxis['sec'] = np.linspace(0, nSec, nTimePoints) # matplotlib time axis (days since 0001-01-01 UTC plus 1) startObj = datetime.datetime.fromtimestamp(tAxis['UTC'][0]) endObj = datetime.datetime.fromtimestamp(tAxis['UTC'][-1]) mplStart = mdates.date2num(startObj) mplEnd = mdates.date2num(endObj) tAxis['matplotlib'] = np.linspace(mplStart, mplEnd, nTimePoints) fileID = patientSigDF['signalsFile'][loadIdx] else: print('Index out of bounds') signals = None tAxis = None sampFq = None fileID = None return {'signals': signals, 'tAxis': tAxis, 'sampFq': sampFq}, fileID
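# pd.tslib was deprecated in pandas 0.20 and removed later, so the
# isinstance(timeStamp, pd.tslib.Timestamp) check above fails on modern
# pandas. pd.Timestamp is the public name, and because it subclasses
# datetime.datetime a single combined check is enough:
#
#     isinstance(timeStamp, (pd.Timestamp, datetime.datetime))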