def plot_stat(rows, cache):
    "Use matplotlib to plot DAS statistics"
    if not PLOT_ALLOWED:
        raise Exception('Matplotlib is not available on the system')
    if cache in ['cache', 'merge']:  # cachein, cacheout, mergein, mergeout
        name_in = '%sin' % cache
        name_out = '%sout' % cache
    else:  # webip, webq, cliip, cliq
        name_in = '%sip' % cache
        name_out = '%sq' % cache

    def format_date(date):
        "Format given date"
        val = str(date)
        return '%s-%s-%s' % (val[:4], val[4:6], val[6:8])
    date_range = [r['date'] for r in rows]
    formated_dates = [format_date(str(r['date'])) for r in rows]
    req_in = [r[name_in] for r in rows]
    req_out = [r[name_out] for r in rows]
    plt.plot(date_range, req_in, 'ro-',
             date_range, req_out, 'gv-')
    plt.grid(True)
    plt.axis([min(date_range), max(date_range),
              0, max([max(req_in), max(req_out)])])
    plt.xticks(date_range, tuple(formated_dates), rotation=17)
    # plt.xlabel('dates [%s, %s]' % (date_range[0], date_range[-1]))
    plt.ylabel('DAS %s behavior' % cache)
    plt.savefig('das_%s.pdf' % cache, format='pdf', transparent=True)
    plt.close()
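# A minimal usage sketch for plot_stat, not from the original source: the rows
# below are hypothetical, and PLOT_ALLOWED/plt are assumed to be set up by this
# module's matplotlib import.
def _demo_plot_stat():
    rows = [
        {'date': 20200101, 'cachein': 120, 'cacheout': 95},
        {'date': 20200102, 'cachein': 140, 'cacheout': 110},
        {'date': 20200103, 'cachein': 90, 'cacheout': 80},
    ]
    plot_stat(rows, 'cache')  # writes das_cache.pdf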
def plot_zipf(*freq):
    ''' basic plotting using matplotlib and pylab '''
    ranks, frequencies = [], []
    langs = ["English", "German", "Finnish"]
    colors = ['#FF0000', '#00FF00', '#0000FF']
    if bonus_part:
        colors.extend(['#00FFFF', '#FF00FF', '#FFFF00'])
        langs.extend(["English (Stemmed)", "German (Stemmed)",
                      "Finnish (Stemmed)"])

    plt.subplot(111)  # 1, 1, 1
    num = 6 if bonus_part else 3
    for i in range(num):
        ranks.append(range(1, len(freq[i]) + 1))
        frequencies.append([e[1] for e in freq[i]])
        # log x and y axes, both with base 10
        plt.loglog(ranks[i], frequencies[i], marker='',
                   base=10, color=colors[i], label=langs[i])
    plt.legend()
    plt.grid(True)
    plt.title("Zipf's law!")
    plt.xlabel('Rank')
    plt.ylabel('Frequency')
    plt.show()
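# Hedged usage sketch for plot_zipf (not from the original source): each
# positional argument is assumed to be a frequency-sorted list of
# (token, count) pairs, one per language, and the module-level bonus_part
# flag is assumed to be False so that exactly three lists are expected.
def _demo_plot_zipf():
    english = [('the', 1000), ('of', 600), ('and', 450)]
    german = [('der', 900), ('die', 850), ('und', 500)]
    finnish = [('ja', 700), ('on', 400), ('ei', 300)]
    plot_zipf(english, german, finnish)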
def serve_css(name, length, keys, values):
    from pylab import plt, mpl
    mpl.rcParams['font.sans-serif'] = ['SimHei']
    mpl.rcParams['axes.unicode_minus'] = False
    from matplotlib.font_manager import FontProperties
    # font = FontProperties(fname="d:\Users\ll.tong\Desktop\msyh.ttf", size=12)
    font = FontProperties(fname="/usr/share/fonts/msyh.ttf", size=11)
    plt.xlabel(u'')
    plt.ylabel(u'出现次数', fontproperties=font)  # "occurrence count"
    plt.title(u'词频统计', fontproperties=font)   # "word frequency statistics"
    plt.grid()
    keys = keys.decode("utf-8").split(' ')
    values = values.split(' ')
    valuesInt = [int(value) for value in values]
    plt.xticks(range(int(length)), keys)
    plt.plot(range(int(length)), valuesInt)
    plt.xticks(rotation=defaultrotation, fontsize=9, fontproperties=font)
    plt.yticks(fontsize=10, fontproperties=font)
    name = name + str(datetime.now().date()).replace(':', '') + '.png'
    imgUrl = 'static/temp/' + name
    fig = matplotlib.pyplot.gcf()
    fig.set_size_inches(12.2, 2)
    plt.savefig(imgUrl, bbox_inches='tight', dpi=100)
    plt.close()
    tempfile = static_file(name, root='./static/temp/')
    # os.remove(imgUrl)
    return tempfile
def plot_weightings():
    """Plots all weighting functions defined in :module: splweighting."""
    from scipy.signal import freqz
    from pylab import plt, np

    sample_rate = 48000
    num_samples = 2 * 4096
    fig, ax = plt.subplots()

    for name, weight_design in sorted(_weighting_coeff_design_funsd.items()):
        b, a = weight_design(sample_rate)
        w, H = freqz(b, a, worN=num_samples)
        freq = w * sample_rate / (2 * np.pi)
        ax.semilogx(freq, 20 * np.log10(np.abs(H) + 1e-20),
                    label='{}-Weighting'.format(name))

    plt.legend(loc='lower right')
    plt.xlabel('Frequency / Hz')
    plt.ylabel('Damping / dB')
    plt.grid(True)
    plt.axis([10, 20000, -80, 5])
    return fig, ax
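# Possible usage of plot_weightings (a sketch, not from the original source):
# it assumes this module's _weighting_coeff_design_funsd dict is populated.
def _demo_plot_weightings():
    from pylab import plt
    fig, ax = plot_weightings()
    ax.set_title('Frequency weighting curves')
    plt.show()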
def example_filterbank():
    from pylab import plt
    import numpy as np

    x = _create_impulse(2000)
    gfb = GammatoneFilterbank(density=1)

    analyse = gfb.analyze(x)
    imax, slopes = gfb.estimate_max_indices_and_slopes()
    fig, axs = plt.subplots(len(gfb.centerfrequencies), 1)
    for (band, state), imx, ax in zip(analyse, imax, axs):
        ax.plot(np.real(band))
        ax.plot(np.imag(band))
        ax.plot(np.abs(band))
        ax.plot(imx, 0, 'o')
        ax.set_yticklabels([])
    [ax.set_xticklabels([]) for ax in axs[:-1]]
    axs[0].set_title('Impulse responses of gammatone bands')

    fig, ax = plt.subplots()

    def plotfun(x, y):
        ax.semilogx(x, 20 * np.log10(np.abs(y) ** 2))

    gfb.freqz(nfft=2 * 4096, plotfun=plotfun)
    plt.grid(True)
    plt.title('Absolute spectra of gammatone bands.')
    plt.xlabel('Normalized Frequency (log)')
    plt.ylabel('Attenuation /dB(FS)')
    plt.axis('tight')
    plt.ylim([-90, 1])
    plt.show()
    return gfb
def plot2(self, figNum, time1, data1, time2, data2,
          title='', units='', options=''):
    plt.figure(figNum)
    # plt.hold(True)
    plt.grid(True)
    if title:
        self.title = title
    if not units:
        self.units = units
    # plt.cla()
    if self.preTitle:
        fig = plt.gcf()
        fig.canvas.set_window_title("Figure %d - %s" % (figNum, self.preTitle))
    plt.title("%s" % (self.title))
    plt.plot(time1, data1, options)
    plt.plot(time2, data2, options)
    plt.ylabel('(%s)' % (self.units))
    plt.xlabel('Time (s)')
    plt.margins(0.04)
def subplotSingle2x(self, figNum, plotNum, numRows, numCols, time, data,
                    title='', units='', options=''):
    print("subplotSingle2x")
    plt.figure(figNum)
    if title:
        self.title = title
    if not units:
        self.units = units
    if self.preTitle:
        fig = plt.gcf()
        fig.canvas.set_window_title("Figure %d - %s" % (figNum, self.preTitle))
    if figNum not in self.sharex.keys():
        self.sharex[figNum] = plt.subplot(numRows, numCols, plotNum)
        plt.plot(time, data, options)

    plt.subplot(numRows, numCols, plotNum, sharex=self.sharex[figNum])
    # plt.hold(True)
    plt.grid(True)
    plt.title("%s" % (self.title))
    plt.plot(time, data, options)
    plt.ylabel('(%s)' % (self.units))
    plt.margins(0.04)
def plot_post_disp_decomposition(self, site, cmpt, loc=2, leg_fs=7,
                                 marker_for_obs='x'):
    y = self.plot_post_obs_linres(site, cmpt, label='obs.',
                                  marker=marker_for_obs)
    y += self.plot_post_disp_pred_added(site, cmpt, label='pred.')
    y += self.plot_R_co(site, cmpt, style='-^', label='Rco', color='orange')
    y += self.plot_E_aslip(site, cmpt, color='green')
    y += self.plot_R_aslip(site, cmpt, color='black')

    plt.grid(True)
    plt.legend(loc=loc, prop={'size': leg_fs})
    plt.ylabel(r'meter')
    plt.gcf().autofmt_xdate()
    plt.title('Postseismic Disp. : {site} - {cmpt}'.format(
        site=get_site_true_name(site_id=site), cmpt=cmpt))
def plot_cumu_disp_decomposition(self, site, cmpt, loc=2, leg_fs=7,
                                 if_ylim=False):
    self.plot_cumu_obs_linres(site, cmpt)
    y = self.plot_cumu_disp_pred_added(site, cmpt, label='pred.')
    y += self.plot_R_co(site, cmpt, style='-^', label='Rco', color='orange')
    y += self.plot_E_cumu_slip(site, cmpt, color='green')
    y += self.plot_R_aslip(site, cmpt, color='black')

    plt.grid(True)
    if if_ylim:
        plt.ylim(calculate_lim(y))
    plt.ylabel(r'meter')
    plt.legend(loc=loc, prop={'size': leg_fs})
    plt.gcf().autofmt_xdate()
    plt.title('Cumulative Disp.: {site} - {cmpt}'.format(
        site=get_site_true_name(site_id=site), cmpt=cmpt))
def test_screenstate_1(self):
    from gdesk import gui
    from pylab import plt
    from pathlib import Path

    gui.load_layout('console')

    samplePath = Path(r'./samples')

    gui.img.select(1)
    gui.img.open(samplePath / 'kodim05.png')
    gui.img.zoom_fit()

    plt.plot(gui.vs.mean(2).mean(1))
    plt.title('Column means of image 1')
    plt.xlabel('Column Number')
    plt.ylabel('Mean')
    plt.grid()
    plt.show()

    gui.img.select(2)
    gui.img.open(samplePath / 'kodim23.png')
    gui.img.zoom_full()

    plt.figure()
    plt.plot(gui.vs.mean(2).mean(0))
    plt.title('Row means of image 2')
    plt.xlabel('Row Number')
    plt.ylabel('Mean')
    plt.grid()
    plt.show()
def plot_pre(fn):
    t = read_t(fn)
    y = read_y(fn)
    yres = read_yres(fn)
    plt.plot_date(t + _adj_dates, y, 'x', color='lightblue')
    plt.plot_date(t + _adj_dates, yres, 'x', color='lightgreen')

    linsec = read_linsec(fn)
    ch = cut_ts(t, linsec)
    plt.plot_date(t[ch] + _adj_dates, y[ch], 'x',
                  color='blue', label='original')
    plt.plot_date(t[ch] + _adj_dates, yres[ch], 'x',
                  color='green', label='residual')

    outliers = read_outlier(fn)
    idx = outlier_index(t, outliers)
    plt.plot_date(t[idx] + _adj_dates, y[idx], 'o',
                  mec='red', mew=1, mfc='blue')
    plt.plot_date(t[idx] + _adj_dates, yres[idx], 'o',
                  mec='red', mew=1, mfc='green')

    for jump in read_jumps(fn):
        plt.axvline(jump + _adj_dates, color='red', ls='--')

    plt.grid(True)

    site = basename(fn).split('.')[0]
    cmpt = basename(fn).split('.')[1]
    plt.title('%s - %s' % (site, cmpt))
def plot(self, new_plot=False, xlim=None, ylim=None, title=None,
         figsize=None, xlabel=None, ylabel=None, fontsize=None,
         show_legend=True, grid=True):
    """
    Plot data using matplotlib library. Use show() method for matplotlib
    to see result or ::

        %pylab inline

    in IPython to see plot as cell output.

    :param bool new_plot: create or not new figure
    :param xlim: x-axis range
    :param ylim: y-axis range
    :type xlim: None or tuple(x_min, x_max)
    :type ylim: None or tuple(y_min, y_max)
    :param title: title
    :type title: None or str
    :param figsize: figure size
    :type figsize: None or tuple(width, height)
    :param xlabel: x-axis name
    :type xlabel: None or str
    :param ylabel: y-axis name
    :type ylabel: None or str
    :param fontsize: font size
    :type fontsize: None or int
    :param bool show_legend: show or not labels for plots
    :param bool grid: show grid or not
    """
    xlabel = self.xlabel if xlabel is None else xlabel
    ylabel = self.ylabel if ylabel is None else ylabel
    figsize = self.figsize if figsize is None else figsize
    fontsize = self.fontsize if fontsize is None else fontsize
    self.fontsize_ = fontsize
    self.show_legend_ = show_legend
    title = self.title if title is None else title
    xlim = self.xlim if xlim is None else xlim
    ylim = self.ylim if ylim is None else ylim
    new_plot = self.new_plot or new_plot

    if new_plot:
        plt.figure(figsize=figsize)

    plt.xlabel(xlabel, fontsize=fontsize)
    plt.ylabel(ylabel, fontsize=fontsize)
    plt.title(title, fontsize=fontsize)
    plt.tick_params(axis='both', labelsize=fontsize)
    plt.grid(grid)

    if xlim is not None:
        plt.xlim(xlim)
    if ylim is not None:
        plt.ylim(ylim)

    self._plot()

    if show_legend:
        plt.legend(loc='best', scatterpoints=1)
def plot_mat(self, mat, fn):
    plt.matshow(asarray(mat.todense()))
    plt.axis('equal')
    sh = mat.shape
    plt.gca().set_yticks(range(0, sh[0]))
    plt.gca().set_xticks(range(0, sh[1]))
    plt.grid(True)
    plt.colorbar()
    plt.savefig(join(self.outs_dir, fn))
    plt.close()
def plotSigmoidTanh(fname=None):
    fig, ax = plt.subplots()
    xs = np.linspace(-10.0, 10.0, num=50, endpoint=True)
    ys = [sigmoidTanh(x, 0.9) for x in xs]
    ax.plot(xs, ys, 'black')
    plt.title("y=sigmoid(s)")
    plt.grid(True)
    if fname:
        plt.savefig(fname)
    plt.show()
def test_dip(self):
    xf = arange(0, 425)
    dips = self.fm.get_dip(xf)
    plt.plot(xf, dips)
    plt.grid(True)
    plt.gca().set_xticks(self.fm.Y_PC)
    plt.ylim([0, 30])
    plt.gca().invert_yaxis()
    plt.savefig(join(self.outs_dir, '~y_fc_dips.png'))
    plt.close()
def plot_iou(checkpoint_dir, iou_list):
    x = range(0, len(iou_list))
    y = iou_list
    plt.switch_backend('agg')
    plt.plot(x, y, color='red', marker='o', label='IOU')
    plt.xticks(range(0, len(iou_list) + 3, (len(iou_list) + 10) // 10))
    plt.legend()
    plt.grid()
    plt.savefig(os.path.join(checkpoint_dir, 'iou_fig.pdf'))
    plt.close()
def plot_loss(checkpoint_dir, loss_list, save_pred_every):
    x = range(0, len(loss_list) * save_pred_every, save_pred_every)
    y = loss_list
    plt.switch_backend('agg')
    plt.plot(x, y, color='blue', marker='o', label='Train loss')
    plt.xticks(range(0, len(loss_list) * save_pred_every + 3,
                     (len(loss_list) * save_pred_every + 10) // 10))
    plt.legend()
    plt.grid()
    plt.savefig(os.path.join(checkpoint_dir, 'loss_fig.pdf'))
    plt.close()
def plot_precisonAndjac(checkpoint_dir, pre_list, jac_list):
    x = range(0, len(pre_list))
    y = pre_list
    y2 = jac_list
    plt.switch_backend('agg')
    plt.plot(x, y, color='red', marker='o', label='precision')
    plt.plot(x, y2, color='blue', marker='o', label='jaccard')
    plt.xticks(range(0, len(pre_list) + 3, (len(pre_list) + 10) // 10))
    plt.legend()
    plt.grid()
    plt.savefig(os.path.join(checkpoint_dir, 'precisionAndjac_fig1.pdf'))
    plt.close()
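# Hypothetical usage of the three checkpoint plotting helpers above (not from
# the original source): the metric values are made up, and checkpoint_dir must
# already exist on disk.
def _demo_checkpoint_plots(checkpoint_dir='/tmp'):
    plot_loss(checkpoint_dir, loss_list=[2.0, 1.4, 1.1, 0.9],
              save_pred_every=100)
    plot_iou(checkpoint_dir, iou_list=[0.42, 0.51, 0.55, 0.58])
    plot_precisonAndjac(checkpoint_dir,
                        pre_list=[0.61, 0.68, 0.72, 0.74],
                        jac_list=[0.42, 0.51, 0.55, 0.58])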
def plot_fault_framework(fault_framework):
    fm = fault_framework
    plt.plot(fm.Y_PC, fm.DEP, '-o')
    plt.axis('equal')
    plt.axhline(0, color='black')
    plt.gca().set_yticks(fm.DEP)
    plt.gca().set_xticks(fm.Y_PC)
    plt.grid(True)
    plt.xlabel('From trench to continent (km)')
    plt.ylabel('depth (km)')
    for xi, yi, dip in zip(fm.Y_PC, fm.DEP, fm.DIP_D):
        plt.text(xi, yi, 'dip = %.1f' % dip)
    plt.gca().invert_yaxis()
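# A runnable sketch of plot_fault_framework with a made-up fault geometry
# (not from the original source). The real fault_framework object presumably
# carries Y_PC, DEP, and DIP_D attributes, which a SimpleNamespace can
# stand in for here.
def _demo_plot_fault_framework():
    from types import SimpleNamespace
    fm = SimpleNamespace(Y_PC=[0, 60, 120, 180],
                         DEP=[0, 10, 25, 45],
                         DIP_D=[9.5, 14.0, 18.5])
    plot_fault_framework(fm)
    plt.show()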
def plotSigmoidBias(fname=None):
    fig, ax = plt.subplots()
    xs = np.linspace(-10.0, 10.0, num=50, endpoint=True)
    ys = [sigmoid(1.0 * x - 5.0) for x in xs]
    ax.plot(xs, ys, 'black', linestyle='-', label='sig(1.0 * x - 1.0 * 5)')
    ys = [sigmoid(1.0 * x) for x in xs]
    ax.plot(xs, ys, 'black', linestyle='--', label='sig(1.0 * x + 1.0 * 0)')
    ys = [sigmoid(1.0 * x + 5.0) for x in xs]
    ax.plot(xs, ys, 'black', linestyle='-.', label='sig(1.0 * x + 1.0 * 5)')
    legend = ax.legend(loc='best', framealpha=0.5)
    plt.title("y=sig(s * w1 + 1.0 * w2)")
    plt.grid(True)
    if fname:
        plt.savefig(fname)
    plt.show()
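# The sigmoid and sigmoidTanh helpers used by the two plotting functions above
# are not shown in this section. A plausible sketch, assuming the standard
# logistic function and a tanh-based variant with steepness parameter a (these
# exact forms are an assumption, not taken from the original source):
def sigmoid(s):
    # standard logistic function
    return 1.0 / (1.0 + np.exp(-s))

def sigmoidTanh(s, a):
    # assumed form: logistic-like curve built from tanh, steepness a
    return 0.5 * (1.0 + np.tanh(a * s / 2.0))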
def draw(x, y, x_text, y_text, title):
    plt.figure(figsize=(30, 5))
    plt.plot(x, y, color='red', label='data_check_result')
    for i in range(1, len(x)):
        plt.text(x[i], y[i], str((x[i], round(y[i], 4))))
        # plt.text(x, y, (x, y), color='red')
    plt.xlabel(x_text)
    plt.ylabel(y_text)
    plt.title(title)
    plt.grid(True)
    plt.legend()
    pic = time.strftime("%Y-%m-%d_%H_%S_%M", time.localtime()) + ".pdf"
    plt.savefig(pic)
    plt.show()
def test_dep(self):
    xf = arange(0, 425)
    deps = self.fm.get_dep(xf)
    plt.plot(xf, deps)
    plt.gca().set_yticks(self.fm.DEP)
    plt.gca().set_xticks(self.fm.Y_PC)
    plt.grid(True)
    plt.title('Ground x versus depth')
    plt.xlabel('Ground X (km)')
    plt.ylabel('depth (km)')
    plt.axis('equal')
    plt.gca().invert_yaxis()
    plt.savefig(join(self.outs_dir, '~Y_PC_vs_deps.png'))
    plt.close()
def plotMap(self, fname=None):
    fig, ax = plt.subplots()
    ax.plot([self.o1[0], self.a[0], self.o2[0]],
            [self.o1[1], self.a[1], self.o2[1]], 'r', label='Trajectory 0')
    ax.plot([self.o1[0], self.b[0], self.o2[0]],
            [self.o1[1], self.b[1], self.o2[1]], 'b--', label='Trajectory 1')
    legend = ax.legend(loc='best', framealpha=0.5)
    plt.title("Map")
    plt.grid(True)
    if fname:
        plt.savefig(fname)
    plt.show()
def plotNE(self, figNum, north, east, title='', units='', options=''):
    plt.figure(figNum)
    # plt.cla()
    # plt.hold(True)
    plt.grid(True)
    if title:
        self.title = title
    if not units:
        self.units = units
    if self.preTitle:
        fig = plt.gcf()
        fig.canvas.set_window_title("%s" % (self.preTitle))
    plt.title("%s" % (self.title))
    plt.plot(east, north, options)
    plt.xlabel('East (%s)' % (self.units))
    plt.ylabel('North (%s)' % (self.units))
def test_code_3(self):
    from gdesk import gui
    from pylab import plt

    plt.plot(gui.vs.mean(1))
    plt.grid(True)
    plt.title('Column Means')
    plt.show()

    plt.figure()
    plt.plot(gui.vs.mean(0))
    plt.grid(True)
    plt.title('Row Means')
    plt.show()

    answer = gui.question('Looks everything OK?')

    plt.close('all')
    gui.menu_trigger('image', GammaDeskSuite.panid,
                     ['Edit', 'Show Prior Image'])
def _plot_base(dep, val, deplim_small, xlim_small, xlabel):
    plt.subplot(1, 2, 1)
    plt.plot(val, dep)
    plt.gca().invert_yaxis()
    plt.grid(True)
    plt.ylabel('depth/km')
    plt.xlabel(xlabel)
    locs, labels = plt.xticks()
    plt.setp(labels, rotation=-45)

    plt.subplot(1, 2, 2)
    plt.plot(val, dep)
    plt.gca().invert_yaxis()
    plt.grid(True)
    plt.ylim(deplim_small)
    plt.xlim(xlim_small)
    plt.xlabel(xlabel)
    locs, labels = plt.xticks()
    plt.setp(labels, rotation=-45)
def freqz(sosmat, nsamples=44100, sample_rate=44100, plot=True):
    """Plots frequency response of sosmat."""
    from pylab import np, plt, fft, fftfreq
    x = np.zeros(nsamples)
    x[int(nsamples / 2)] = 0.999
    y, states = sosfilter_double_c(x, sosmat)
    Y = fft(y)
    f = fftfreq(len(x), 1.0 / sample_rate)
    if plot:
        plt.grid(True)
        plt.axis([0, sample_rate / 2, -100, 5])
        L = 20 * np.log10(np.abs(Y[:int(len(x) / 2)]) + 1e-17)
        plt.semilogx(f[:int(len(x) / 2)], L, lw=0.5)
        plt.title(u'freqz sos filter')
        plt.xlabel('Frequency / Hz')
        plt.ylabel(u'Damping /dB(FS)')
        plt.xlim((10, sample_rate / 2))
    return x, y, f, Y
def animate(S0, u, d, p, T, N, P=10):
    '''
    S: data
    NumSims: simulation size
    numPaths: no. of simulated paths shown
    '''
    S = simulate(S0, u, d, p, T, N)
    fig, mainplot = plt.subplots(figsize=(10, 5))
    mainplot.plot(S[:, :P])
    plt.grid(True)
    plt.xlabel('time step')
    plt.ylabel('price')
    divider = make_axes_locatable(mainplot)
    axHist = divider.append_axes("right", 2.5, pad=0.1, sharey=mainplot)
    axHist.hist(S[-1, :N], bins=15, orientation='horizontal', density=True)
    axHist.yaxis.set_ticks_position("right")
    axHist.xaxis.set_major_formatter(FuncFormatter('{0:.1%}'.format))
    plt.grid(True)
    plt.xlabel('probability')
    plt.show()
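# Hypothetical call of animate for a binomial price lattice (not from the
# original source): simulate() is assumed to return an array of simulated
# paths with one column per simulation, and every parameter value below is
# illustrative only.
def _demo_animate():
    animate(S0=100.0, u=1.02, d=0.98, p=0.5, T=1.0, N=1000, P=10)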
def plot_vel_decomposition(self, site, cmpt, loc=0, leg_fs=7, if_ylim=False):
    y = self.plot_pred_vel_added(site, cmpt, label='total')
    y += self.plot_vel_R_co(site, cmpt, style='-^', label='Rco', color='orange')
    y += self.plot_vel_E_cumu_slip(site, cmpt, color='green')
    y += self.plot_vel_R_aslip(site, cmpt, color='black')

    plt.grid(True)
    if if_ylim:
        plt.ylim(calculate_lim(y))
    plt.ylabel(r'mm/yr')
    plt.legend(loc=loc, prop={'size': leg_fs})
    plt.gcf().autofmt_xdate()
    plt.title('Velocity: {site} - {cmpt}'.format(
        site=get_site_true_name(site_id=site), cmpt=cmpt))
def do_plot():
    if solver.iter % display == 0:
        loss[solver.iter] = solver.net.blobs['loss3/loss3'].data.copy()
        loss_disp = 'loss=' + str(loss[solver.iter])
        print('%3d) %s' % (solver.iter, loss_disp))
        train_loss[solver.iter // display] = loss[solver.iter]
        ax1.plot(it_axes[0:solver.iter // display],
                 train_loss[0:solver.iter // display], 'r')
        # if it > test_interval:
        #     ax1.plot(it_val_axes[0:it // test_interval],
        #              val_loss[0:it // test_interval], 'g')  # val always on top
        ax1.set_ylim([5, 7])
        plt.title(training_id)
        plt.ion()
        plt.grid(True)
        plt.show()
        plt.pause(0.001)

    # VALIDATE. Validation done this way only uses 1 GPU.
    if solver.iter % test_interval == 0 and solver.iter > 0:
        loss_val = 0
        for i in range(test_iters):
            solver.test_nets[0].forward()
            loss_val += solver.test_nets[0].blobs['loss3/loss3'].data
        loss_val /= test_iters
        print("Val loss: {:.3f}".format(loss_val))
        val_loss[solver.iter // test_interval - 1] = loss_val
        ax1.plot(it_val_axes[0:solver.iter // test_interval],
                 val_loss[0:solver.iter // test_interval], 'g')
        ax1.set_ylim([5, 7])
        plt.title(training_id)
        plt.ion()
        plt.grid(True)
        plt.show()
        plt.pause(0.001)
        title = ('../../../datasets/SocialMedia/models/training/'
                 + training_id + str(solver.iter) + '.png')
        # Save graph to disk
        plt.savefig(title, bbox_inches='tight')
def plot_post_disp_decomposition(self, site, cmpt, loc=2, leg_fs=7,
                                 added_label=None, marker_for_obs='x'):
    y = self.plot_post_obs_linres(site, cmpt, label='obs.',
                                  marker=marker_for_obs)
    y += self.plot_post_disp_pred_from_result_file(site, cmpt, label='pred.')
    y += self.plot_R_co(site, cmpt, style='-^', label='Rco', color='orange')
    y += self.plot_E_aslip(site, cmpt, color='green')

    plt.grid(True)
    self.plot_post_disp_pred_added(site, cmpt, label=added_label)
    plt.legend(loc=loc, prop={'size': leg_fs})
    plt.ylabel(r'm')
    plt.gcf().autofmt_xdate()
    plt.title('Postseismic Disp. : {site} - {cmpt}'.format(
        site=get_site_true_name(site_id=site), cmpt=cmpt))
def plot(self, good, bad, dataset0, dataset1, fname=None):
    fig, ax = plt.subplots()
    ax.plot([self.o1[0], self.a[0], self.o2[0]],
            [self.o1[1], self.a[1], self.o2[1]], 'r', label='Trajectory 0')
    ax.plot([self.o1[0], self.b[0], self.o2[0]],
            [self.o1[1], self.b[1], self.o2[1]], 'b--', label='Trajectory 1')
    if dataset0.any():
        ax.plot(dataset0[:, 0], dataset0[:, 1], 'ro', label='Train Dataset 0')
    if dataset1.any():
        ax.plot(dataset1[:, 0], dataset1[:, 1], 'b*', label='Train Dataset 1')
    if good.any():
        ax.plot(good[:, 0], good[:, 1], 'go', markersize=10,
                label='Correct prediction')
    if bad.any():
        ax.plot(bad[:, 0], bad[:, 1], 'black', linestyle='none', marker='D',
                markersize=10, label='Incorrect prediction')
    legend = ax.legend(loc='best', framealpha=0.5)
    plt.title("Map")
    plt.grid(True)
    if fname:
        plt.savefig(fname)
    plt.show()
def plot_cumu_disp_decomposition(self, site, cmpt, loc=2, leg_fs=7,
                                 if_ylim=False, added_label=None):
    self.plot_cumu_obs_linres(site, cmpt)
    y = self.plot_cumu_disp_pred_from_result_file(site, cmpt, label='pred.')
    y += self.plot_R_co(site, cmpt, style='-^', label='Rco', color='orange')
    y += self.plot_E_cumu_slip(site, cmpt, color='green')

    plt.grid(True)
    if if_ylim:
        plt.ylim(calculate_lim(y))
    self.plot_cumu_disp_pred_added(site, cmpt, label=added_label)
    plt.ylabel(r'm')
    plt.legend(loc=loc, prop={'size': leg_fs})
    plt.gcf().autofmt_xdate()
    plt.title('Cumulative Disp.: {site} - {cmpt}'.format(
        site=get_site_true_name(site_id=site), cmpt=cmpt))
def get_axis():
    # 1 inch = 2.54 cm
    fig, axes = plt.subplots(ncols=1, nrows=1, sharex=True,
                             figsize=(7, 4.7), dpi=80,
                             facecolor='w', edgecolor='k')
    fig.subplots_adjust(right=0.90, bottom=0.15, top=0.85,
                        hspace=0.2, wspace=0.1)
    # plt.gcf() returns the current figure, so pyplot calls below
    # apply to the axes just created in it.
    # grid on
    # plt.grid(color='r', linestyle='--', linewidth=1, alpha=0.3)
    plt.grid(color='lightgray', linestyle='-', linewidth=0.5, alpha=0.3)
    return axes
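# Possible usage of get_axis (a sketch, not from the original source):
# draw onto the returned axes and show the figure.
def _demo_get_axis():
    import numpy as np
    ax = get_axis()
    x = np.linspace(0, 2 * np.pi, 200)
    ax.plot(x, np.sin(x), label='sin')
    ax.legend()
    plt.show()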
def plot_L_curve(files,
                 nlin_pars=['log10_He_', 'log10_visM_', 'rake'],
                 nlin_pars_ylabels=[r'$log_{10}(He)$',
                                    r'$log_{10}(visM)$',
                                    'rake']):
    nreses = collect_from_result_files(files, 'residual_norm_weighted')
    nroughs = collect_from_result_files(files, 'roughening_norm')

    num_subplots = 1 + len(nlin_pars)

    x1 = amin(nreses)
    x2 = amax(nreses)
    dx = x2 - x1
    xlim = (x1 - dx * 0.02, x2 + dx * 0.2)
    xticks = range(int(x1), int(x2), 5)

    plt.subplot(num_subplots, 1, 1)
    plt.loglog(nreses, nroughs, 'o-')
    plt.xlim(xlim)
    plt.gca().set_xticks(xticks)
    plt.gca().get_xaxis().set_major_formatter(
        matplotlib.ticker.ScalarFormatter())
    plt.ylabel('roughening')
    plt.xlabel('Residual Norm')
    plt.grid(True)

    nth = 2
    for par, par_label in zip(nlin_pars, nlin_pars_ylabels):
        y = collect_from_result_files(files, par)
        plt.subplot(num_subplots, 1, nth)
        plt.semilogx(nreses, y, 'o-')
        plt.xlim(xlim)
        plt.gca().set_xticks(xticks)
        plt.gca().get_xaxis().set_major_formatter(
            matplotlib.ticker.ScalarFormatter())
        plt.ylabel(par_label)
        plt.xlabel('Residual Norm')
        plt.grid(True)
        nth += 1
def plotLive(combine_Type, combine_Name, lat_Name, long_Name,
             massFlow_Name, filename):
    data = pd.read_csv(filename)
    if combine_Type != 0:
        comb_df = data[data[combine_Name] == combine_Type]
        lat_df = comb_df[lat_Name]
        lon_df = comb_df[long_Name]
        y = comb_df[massFlow_Name]
    else:
        lat_df = data[lat_Name]
        lon_df = data[long_Name]
        y = data[massFlow_Name]

    e, n = convertToUTM(lat_df, lon_df)

    def makeFig():
        plt.plot(x, y)
        plt.ylabel('Easting')
        plt.xlabel('Northing')

    plt.ion()  # enable interactivity
    plt.grid()
    fig = plt.figure()  # make a figure

    x = list()
    y = list()
    for i in arange(len(n)):
        x.append(n[i])
        y.append(e[i])
        drawnow(makeFig)
def get_linear_model_histogram(code, ptype="f", dtype="d", start=None, end=None):
    # index codes: sh: '999999', cyb: 'zs399006', zxb: 'zs399005'
    # code = '999999'
    # code = '601608'
    # code = '000002'
    # asset = ts.get_hist_data(code)['close'].sort_index(ascending=True)
    # df = tdd.get_tdx_Exp_day_to_df(code, 'f').sort_index(ascending=True)
    df = tdd.get_tdx_append_now_df(code, ptype, start, end).sort_index(ascending=True)
    if not dtype == "d":
        df = tdd.get_tdx_stock_period_to_type(df, dtype).sort_index(ascending=True)
    asset = df["close"]
    log.info("df:%s" % asset[:1])
    asset = asset.dropna()
    dates = asset.index

    if not code.startswith("999") or not code.startswith("399"):
        if code[:1] in ["5", "6", "9"]:
            code2 = "999999"
        elif code[:1] in ["3"]:
            code2 = "399006"
        else:
            code2 = "399001"
        df1 = tdd.get_tdx_append_now_df(code2, ptype, start, end).sort_index(ascending=True)
        if not dtype == "d":
            df1 = tdd.get_tdx_stock_period_to_type(df1, dtype).sort_index(ascending=True)
        asset1 = df1.loc[asset.index, "close"]
        startv = asset1[:1]
        # asset_v = asset[:1]
        # print(startv, asset_v)
        asset1 = asset1.apply(lambda x: round(x / asset1[:1], 2))
        # print(asset1[:4])

    # Plot price against time
    # _, ax = plt.subplots()
    # fig = plt.figure()
    fig = plt.figure(figsize=(16, 10))
    # fig = plt.figure(figsize=(16, 10), dpi=72)
    # plt.subplots_adjust(bottom=0.1, right=0.8, top=0.9)
    plt.subplots_adjust(left=0.05, bottom=0.08, right=0.95, top=0.95,
                        wspace=0.15, hspace=0.25)
    # fig.set_size_inches(18.5, 10.5)
    ax1 = fig.add_subplot(321)
    # asset = asset.apply(lambda x: round(x / asset[:1], 2))
    ax1.plot(asset)
    # ax1.plot(asset1, '-r', linewidth=2)
    ticks = ax1.get_xticks()
    ax1.set_xticklabels([dates[i] for i in ticks[:-1]])  # label x-axis with dates

    # Fit a linear model
    X = np.arange(len(asset))
    x = sm.add_constant(X)
    model = regression.linear_model.OLS(asset, x).fit()
    a = model.params[0]
    b = model.params[1]
    # log.info("a:%s b:%s" % (a, b))
    log.info("X:%s a:%s b:%s" % (len(asset), a, b))
    Y_hat = X * b + a

    # Actual minus fitted: the extreme residuals define the value band.
    # Shift down:
    i = (asset.values.T - Y_hat).argmin()
    c_low = X[i] * b + a - asset.values[i]
    Y_hatlow = X * b + a - c_low
    # Shift up:
    i = (asset.values.T - Y_hat).argmax()
    c_high = X[i] * b + a - asset.values[i]
    Y_hathigh = X * b + a - c_high
    plt.plot(X, Y_hat, "k", alpha=0.9)
    plt.plot(X, Y_hatlow, "r", alpha=0.9)
    plt.plot(X, Y_hathigh, "r", alpha=0.9)
    plt.xlabel("Date", fontsize=14)
    plt.ylabel("Price", fontsize=14)
    plt.title(code, fontsize=14)
    plt.grid(True)
    # plt.legend([code])
    # plt.legend([code, 'Value center line', 'Value interval line'])
    scale = 1.1
    zp = zoompan.ZoomPan()
    figZoom = zp.zoom_factory(ax1, base_scale=scale)
    figPan = zp.pan_factory(ax1)

    ax2 = fig.add_subplot(323)
    ticks = ax2.get_xticks()
    ax2.set_xticklabels([dates[i] for i in ticks[:-1]])
    # plt.plot(X, Y_hat, 'k', alpha=0.9)
    n = 5
    d = (-c_high + c_low) / n
    c = c_high
    while c <= c_low:
        Y = X * b + a - c
        plt.plot(X, Y, "r", alpha=0.9)
        c = c + d
    # asset = asset.apply(lambda x: round(x / asset[:1], 2))
    ax2.plot(asset)
    # ax2.plot(asset1, '-r', linewidth=2)
    plt.xlabel("Date", fontsize=14)
    plt.ylabel("Price", fontsize=14)
    plt.grid(True)
    # plt.title(code, fontsize=14)
    # plt.legend([code])

    # Plot the distance between price and the fitted center line (Y - Y_hat)
    # in its own panel; split the band evenly -- above 0 is overvalued,
    # below 0 undervalued, 0 is the value center line.
    ax3 = fig.add_subplot(322)
    # distance = (asset.values.T - Y_hat)
    distance = (asset.values.T - Y_hat)[0]
    if code.startswith("999") or code.startswith("399"):
        ax3.plot(asset)
        plt.plot(distance)
        ticks = ax3.get_xticks()
        ax3.set_xticklabels([dates[i] for i in ticks[:-1]])
        n = 5
        d = (-c_high + c_low) / n
        c = c_high
        while c <= c_low:
            Y = X * b + a - c
            plt.plot(X, Y - Y_hat, "r", alpha=0.9)
            c = c + d
        ax3.plot(asset)
        plt.xlabel("Date", fontsize=14)
        plt.ylabel("Price-center price", fontsize=14)
        plt.grid(True)
    else:
        as3 = asset.apply(lambda x: round(x / asset[:1], 2))
        ax3.plot(as3)
        ax3.plot(asset1, "-r", linewidth=2)
        plt.grid(True)
    zp3 = zoompan.ZoomPan()
    figZoom = zp3.zoom_factory(ax3, base_scale=scale)
    figPan = zp3.pan_factory(ax3)
    # plt.title(code, fontsize=14)
    # plt.legend([code])

    # Histogram of price frequencies within the band; the band is split
    # into 100 bins for finer resolution.
    ax4 = fig.add_subplot(325)
    log.info("assert:len:%s %s" % (len(asset.values.T - Y_hat),
                                   (asset.values.T - Y_hat)[0]))
    # distance = map(lambda x: int(x), (asset.values.T - Y_hat) / Y_hat * 100)
    # now_distanse = int((asset.iat[-1] - Y_hat[-1]) / Y_hat[-1] * 100)
    # log.debug("dis:%s now:%s" % (distance[:2], now_distanse))
    # log.debug("now_distanse:%s" % now_distanse)
    distance = asset.values.T - Y_hat
    now_distanse = asset.iat[-1] - Y_hat[-1]
    # distance = (asset.values.T - Y_hat)[0]
    pd.Series(distance).plot(kind="hist", stacked=True, bins=100)
    plt.axvline(now_distanse, label="1", color="red")
    # plt.axhline(now_distanse, label="1", color='red')
    # plt.axvline(asset.iat[0], label="1", color='red', linestyle="--")
    plt.xlabel("Undervalue ------------------------------------------> Overvalue",
               fontsize=14)
    plt.ylabel("Frequency", fontsize=14)
    # plt.title('Undervalue & Overvalue Statistical Chart', fontsize=14)
    plt.legend([code, asset.iat[-1]])
    plt.grid(True)

    # plt.show()
    # import os
    # print(os.path.abspath(os.path.curdir))

    ax5 = fig.add_subplot(326)
    log.info("assert:len:%s %s" % (len(asset.values.T - Y_hat),
                                   (asset.values.T - Y_hat)[0]))
    # distance = map(lambda x: int(x), (asset.values.T - Y_hat) / Y_hat * 100)
    distance = (asset.values.T - Y_hat) / Y_hat * 100
    now_distanse = (asset.iat[-1] - Y_hat[-1]) / Y_hat[-1] * 100
    log.debug("dis:%s now:%s" % (distance[:2], now_distanse))
    log.debug("now_distanse:%s" % now_distanse)
    # n, bins = np.histogram(distance, 50)
    # print(n, bins[:2])
    pd.Series(distance).plot(kind="hist", stacked=True, bins=100)
    plt.axvline(now_distanse, label="1", color="red")
    # plt.axhline(now_distanse, label="1", color='red')
    # plt.axvline(asset.iat[0], label="1", color='red', linestyle="--")
    plt.xlabel("Undervalue ------------------------------------------> Overvalue",
               fontsize=14)
    plt.ylabel("Frequency", fontsize=14)
    # plt.title('Undervalue & Overvalue Statistical Chart', fontsize=14)
    plt.legend([code, asset.iat[-1]])
    plt.grid(True)

    ax6 = fig.add_subplot(324)
    h = df.loc[:, ["open", "close", "high", "low"]]
    highp = h["high"].values
    lowp = h["low"].values
    openp = h["open"].values
    closep = h["close"].values
    lr = LinearRegression()
    x = np.atleast_2d(np.linspace(0, len(closep), len(closep))).T
    lr.fit(x, closep)
    xt = np.atleast_2d(np.linspace(0, len(closep) + 200, len(closep) + 200)).T
    yt = lr.predict(xt)
    bV = []
    bP = []
    for i in range(1, len(highp) - 1):
        if (highp[i] <= highp[i - 1] and highp[i] < highp[i + 1]
                and lowp[i] <= lowp[i - 1] and lowp[i] < lowp[i + 1]):
            bV.append(lowp[i])
            bP.append(i)
    d, p = LIS(bV)
    idx = []
    for i in range(len(p)):
        idx.append(bP[p[i]])
    lr = LinearRegression()
    X = np.atleast_2d(np.array(idx)).T
    Y = np.array(d)
    lr.fit(X, Y)
    estV = lr.predict(xt)
    ax6.plot(closep, linewidth=2)
    ax6.plot(idx, d, "ko")
    ax6.plot(xt, estV, "-r", linewidth=3)
    ax6.plot(xt, yt, "-g", linewidth=3)
    plt.grid(True)
    # plt.tight_layout()
    zp2 = zoompan.ZoomPan()
    figZoom = zp2.zoom_factory(ax6, base_scale=scale)
    figPan = zp2.pan_factory(ax6)
    plt.show()
import glob

import h5py
from pylab import plt


def collect_results(outs_files, key):
    outs = []
    for file in outs_files:
        with h5py.File(file, 'r') as fid:
            out = fid[key][...]
            outs.append(out)
    return outs


files = sorted(glob.glob('../outs/ano_??.h5'))
nrough1 = collect_results(files, 'regularization/roughening/norm')
nres1 = collect_results(files, 'misfit/norm_weighted')

files = sorted(glob.glob('../../run0/outs/ano_??.h5'))
nrough0 = collect_results(files, 'regularization/roughening/norm')
nres0 = collect_results(files, 'misfit/norm_weighted')

plt.loglog(nres0, nrough0, '.', label='Result0')
plt.loglog(nres1, nrough1, '.', label='Result1')
plt.grid(True)
plt.xlabel('norm of weighted residual')
plt.ylabel('norm of solution roughness')
plt.xlim([.7, 5])
plt.legend()
plt.savefig('compare_misfit.png')
plt.show()
def plot_slip_overview(slip, output_file, if_x_log=False,
                       xlim=[0, 1344], ylim=[0, 100],
                       yticks=[20, 40, 60],
                       xticks=[1, 10, 100, 1000],
                       xticklabels=[r'$10^0$', r'$10^1$', r'$10^2$', r'$10^3$'],
                       rotation=45, fontsize=10):
    num_subflts_strike = slip.num_subflt_along_strike
    num_subflts_dip = slip.num_subflt_along_dip

    epochs = slip.get_epochs()

    fig, axes = plt.subplots(num_subflts_dip, num_subflts_strike,
                             sharex=True, sharey=True)
    for ii in range(num_subflts_dip):
        for jj in range(num_subflts_strike):
            ax = axes[ii][jj]
            slip_subflt = slip.get_cumu_slip_at_subfault(ii, jj)
            plt.sca(ax)
            plt.fill_between(x=epochs, y1=slip_subflt, y2=0, color='r')
            if if_x_log:
                ax.set_xscale('log')
            plt.xlim(xlim)
            plt.ylim(ylim)
            plt.grid(True)
            plt.box(True)
            plt.tick_params(axis='both', which='both',
                            bottom=False, top=False,
                            left=False, right=False,
                            labelbottom=False, labeltop=False,
                            labelleft=False, labelright=False)

    fig.subplots_adjust(hspace=0, wspace=0)

    for ax in axes[-1, ::2]:
        plt.sca(ax)
        plt.tick_params(axis='x', which='major',
                        bottom=True, top=False, left=False, right=False,
                        labelbottom=True, labeltop=False,
                        labelleft=False, labelright=False)
        ax.set_xticks(xticks)
        ax.set_xticklabels(xticklabels, rotation=rotation, fontsize=fontsize)
        plt.xlabel('day')

    for ax in axes[0, 1::2]:
        plt.sca(ax)
        plt.tick_params(axis='x', which='major',
                        bottom=False, top=True, left=False, right=False,
                        labelbottom=False, labeltop=True,
                        labelleft=False, labelright=False)
        ax.set_xticks(xticks)
        ax.set_xticklabels(xticklabels, rotation=rotation, fontsize=fontsize)
        plt.xlabel('day')

    for ax in axes[::2, 0]:
        plt.sca(ax)
        plt.tick_params(axis='y', which='major',
                        bottom=False, top=False, left=True, right=False,
                        labelbottom=False, labeltop=False,
                        labelleft=True, labelright=False)
        ax.set_yticks(yticks)
        # ax.set_yticklabels(range(0, 100, 20))
        for label in ax.get_yticklabels():
            label.set_fontsize(10)
            label.set_rotation('horizontal')
        plt.ylabel('slip/m')

    for ax in axes[::2, -1]:
        plt.sca(ax)
        plt.tick_params(axis='y', which='major',
                        bottom=False, top=False, left=False, right=True,
                        labelbottom=False, labeltop=False,
                        labelleft=False, labelright=True)
        ax.set_yticks(yticks)
        # ax.set_yticklabels(range(0, 100, 20))
        for label in ax.get_yticklabels():
            label.set_fontsize(10)
            label.set_rotation('horizontal')
        plt.ylabel('slip/m')
        ax.yaxis.set_label_position("right")

    fig.set_size_inches(33, 10)
    plt.savefig(output_file)
    plt.close()
def freqz(ofb, length_sec=6, ffilt=False, plot=True):
    """Computes the IR and FRF of a digital filter.

    Parameters
    ----------
    ofb : FractionalOctaveFilterbank object
    length_sec : scalar
        Length of the impulse response test signal.
    ffilt : bool
        Backward-forward filtering. Effective order is doubled then.
    plot : bool
        Create plots or not.

    Returns
    -------
    x : ndarray
        Impulse test signal.
    y : ndarray
        Impulse response signal of the filters.
    f : ndarray
        Frequency vector for the FRF.
    Y : Frequency response (FRF) of the summed filters.
    """
    from pylab import np, plt, fft, fftfreq

    x = np.zeros(int(length_sec * ofb.sample_rate))
    x[int(length_sec * ofb.sample_rate / 2)] = 0.9999

    if not ffilt:
        y, states = ofb.filter_mimo_c(x)
        y = y[:, :, 0]
    else:
        y, states = ofb.filter(x, ffilt=ffilt)

    s = np.zeros(len(x))
    half = len(x) // 2
    for i in range(y.shape[1]):
        s += y[:, i]
        X = fft(y[:, i])  # sampled frequency response
        f = fftfreq(len(x), 1.0 / ofb.sample_rate)
        if plot:
            fig = plt.figure('freqz filter bank')
            plt.grid(True)
            plt.axis([0, ofb.sample_rate / 2, -100, 5])
            L = 20 * np.log10(np.abs(X[:half]) + 1e-17)
            plt.semilogx(f[:half], L, lw=0.5)

    Y = fft(s)
    if plot:
        plt.title('freqz() Filter Bank')
        plt.xlabel('Frequency / Hz')
        plt.ylabel('Damping /dB(FS)')
        plt.xlim((10, ofb.sample_rate / 2))

        plt.figure('sum')
        L = 20 * np.log10(np.abs(Y[:half]) + 1e-17)
        plt.semilogx(f[:half], L, lw=0.5)
        level_input = 10 * np.log10(np.sum(x**2))
        level_output = 10 * np.log10(np.sum(s**2))
        plt.axis([5, ofb.sample_rate / 1.8, -50, 5])
        plt.grid(True)
        plt.title('Sum of filter bands')
        plt.xlabel('Frequency / Hz')
        plt.ylabel('Damping /dB(FS)')
        print('sum level', level_output, level_input)

    return x, y, f, Y
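# A hedged usage sketch (not from the original source): FractionalOctaveFilterbank
# is assumed to be the filterbank class this module's docstring names, constructed
# here with its default parameters.
def _demo_freqz_filterbank():
    from pylab import plt
    ofb = FractionalOctaveFilterbank()  # assumed default constructor
    x, y, f, Y = freqz(ofb, length_sec=1, plot=True)
    plt.show()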
def heel_strikes(data, sample_rate, threshold=0.2, order=4, cutoff=5,
                 plot_test=False, t=None):
    """
    Estimate heel strike times between sign changes in accelerometer data.

    The iGAIT software assumes that the y-axis is anterior-posterior,
    and restricts some feature extraction to this orientation.
    In this program, we compute heel strikes for an arbitrary axis.

    Re: heel strikes (from Yang, et al., 2012):
    "The heel contacts are detected by peaks preceding the sign change of
    AP acceleration [3]. In order to automatically detect a heel contact
    event, firstly, the AP acceleration is low pass filtered by the 4th
    order zero lag Butterworth filter whose cut frequency is set to 5 Hz.
    After that, transitional positions where AP acceleration changes from
    positive to negative can be identified. Finally the peaks of AP
    acceleration preceding the transitional positions, and greater than
    the product of a threshold and the maximum value of the AP acceleration
    are denoted as heel contact events...
    This threshold is defined as the ratio to the maximum value of the AP
    acceleration, for example 0.5 indicates the threshold is set at 50% of
    the maximum AP acceleration. Its default value is set to 0.4 as
    determined experimentally in this paper, where this value allowed
    correct detection of all gait events in control subjects. However,
    when a more irregular pattern is analysed, the threshold should be
    less than 0.4. The user can test different threshold values and find
    the best one according to the gait event detection results."

    Parameters
    ----------
    data : list or numpy array
        accelerometer data along one axis (preferably forward direction)
    sample_rate : float
        sample rate of accelerometer reading (Hz)
    threshold : float
        ratio to the maximum value of the anterior-posterior acceleration
    order : integer
        order of the Butterworth filter
    cutoff : integer
        cutoff frequency of the Butterworth filter (Hz)
    plot_test : Boolean
        plot heel strikes?
    t : list or numpy array
        accelerometer time points

    Returns
    -------
    strikes : numpy array of floats
        heel strike timings
    strike_indices : list of integers
        heel strike timing indices

    Examples
    --------
    >>> from mhealthx.xio import read_accel_json
    >>> from mhealthx.signals import compute_sample_rate
    >>> input_file = '/Users/arno/DriveWork/mhealthx/mpower_sample_data/deviceMotion_walking_outbound.json.items-a2ab9333-6d63-4676-977a-08591a5d837f5221783798792869048.tmp'
    >>> device_motion = True
    >>> start = 150
    >>> t, axyz, gxyz, uxyz, rxyz, sample_rate, duration = read_accel_json(input_file, start, device_motion)
    >>> ax, ay, az = axyz
    >>> from mhealthx.extractors.pyGait import heel_strikes
    >>> threshold = 0.4
    >>> order = 4
    >>> cutoff = max([1, sample_rate/10])
    >>> plot_test = True
    >>> data = np.abs(ax) + np.abs(ay) + np.abs(az)
    >>> strikes, strike_indices = heel_strikes(data, sample_rate, threshold, order, cutoff, plot_test, t)
    """
    import numpy as np

    from mhealthx.signals import compute_interpeak
    from mhealthx.signals import butter_lowpass_filter, \
                                 crossings_nonzero_pos2neg

    # Demean data (not in iGAIT):
    data -= np.mean(data)

    # Low-pass filter the AP accelerometer data by the 4th order zero lag
    # Butterworth filter whose cut frequency is set to 5 Hz:
    filtered = butter_lowpass_filter(data, sample_rate, cutoff, order)

    # Find transitional positions where AP accelerometer changes from
    # positive to negative:
    transitions = crossings_nonzero_pos2neg(filtered)

    # Find the peaks of AP acceleration preceding the transitional positions,
    # and greater than the product of a threshold and the maximum value of
    # the AP acceleration:
    strike_indices_smooth = []
    filter_threshold = np.abs(threshold * np.max(filtered))
    for i in range(1, np.size(transitions)):
        segment = range(transitions[i - 1], transitions[i])
        imax = np.argmax(filtered[segment])
        if filtered[segment[imax]] > filter_threshold:
            strike_indices_smooth.append(segment[imax])

    # Compute number of samples between peaks using the real part of the FFT:
    interpeak = compute_interpeak(data, sample_rate)
    decel = int(interpeak / 2)

    # Find maximum peaks close to maximum peaks of smoothed data:
    strike_indices = []
    for ismooth in strike_indices_smooth:
        istrike = np.argmax(data[ismooth - decel:ismooth + decel])
        istrike = istrike + ismooth - decel
        strike_indices.append(istrike)

    if plot_test:
        from pylab import plt
        if t is not None:
            tplot = np.asarray(t)
            tplot -= tplot[0]
        else:
            tplot = np.linspace(0, np.size(data), np.size(data))
        plt.plot(tplot, data, 'k-', linewidth=2, label='data')
        plt.plot(tplot, filtered, 'b-', linewidth=1, label='filtered data')
        plt.plot(tplot[transitions], filtered[transitions],
                 'ko', linewidth=1, label='transition points')
        plt.plot(tplot[strike_indices_smooth],
                 filtered[strike_indices_smooth],
                 'bs', linewidth=1, label='heel strikes')
        plt.plot(tplot[strike_indices], data[strike_indices],
                 'rs', linewidth=1, label='heel strikes')
        plt.xlabel('Time (s)')
        plt.grid()
        plt.legend(loc='lower left', shadow=True)
        plt.show()

    strikes = np.asarray(strike_indices)
    strikes -= strikes[0]
    strikes = strikes / sample_rate

    return strikes, strike_indices
    ts += [t1, t2]

ys1 = []
for yi in mean_percentage_Easlip:
    ys1 += [yi, yi]
plt.fill_between(ts, ys1, np.zeros_like(ys1), color='blue')

ys2 = []
for yi in mean_percentage_Rco:
    ys2 += [1 - yi, 1 - yi]
plt.fill_between(ts, ys2, np.ones_like(ys2), color='green')

obj = plt.fill_between(ts, ys1, ys2, color='red')

plt.grid(False)

label_patch1 = mpatches.Patch(color='green')
label_patch2 = mpatches.Patch(color='red')
label_patch3 = mpatches.Patch(color='blue')

plt.legend([label_patch1, label_patch2, label_patch3],
           [r'$R^{\bf{co}}$', r'$R^{\bf{aslip}}$', r'$E^{\bf{aslip}}$'],
           bbox_to_anchor=(1.13, 1.01))

# plt.gca().set_xscale('log')
for epoch in epochs:
    plt.axvline(epoch, ls='--', color='gray')
plt.xlabel('days after the mainshock')
plt.ylabel('percentage')
def get_linear_model_histogramDouble(code, ptype='f', dtype='d', start=None,
                                     end=None, vtype='close', filter='n',
                                     df=None):
    # index codes: sh: '999999', cyb: 'zs399006', zxb: 'zs399005'
    # code = '999999'
    # code = '601608'
    # code = '000002'
    # asset = ts.get_hist_data(code)['close'].sort_index(ascending=True)
    # df = tdd.get_tdx_Exp_day_to_df(code, 'f').sort_index(ascending=True)
    if start is not None and filter == 'y':
        if code not in ['999999', '399006', '399001']:
            index_d, dl = tdd.get_duration_Index_date(dt=start)
            log.debug("index_d:%s dl:%s" % (str(index_d), dl))
        else:
            index_d = cct.day8_to_day10(start)
            log.debug("index_d:%s" % (index_d))
        start = tdd.get_duration_price_date(code, ptype='low', dt=index_d)
        log.debug("start:%s" % (start))
    if df is None:
        # df = tdd.get_tdx_append_now_df(code, ptype, start, end).sort_index(ascending=True)
        df = tdd.get_tdx_append_now_df_api(code, ptype, start, end).sort_index(ascending=True)
    if not dtype == 'd':
        df = tdd.get_tdx_stock_period_to_type(df, dtype).sort_index(ascending=True)
    asset = df[vtype]
    log.info("df:%s" % asset[:1])
    asset = asset.dropna()
    dates = asset.index

    if not code.startswith('999') or not code.startswith('399'):
        if code[:1] in ['5', '6', '9']:
            code2 = '999999'
        elif code[:1] in ['3']:
            code2 = '399006'
        else:
            code2 = '399001'
        df1 = tdd.get_tdx_append_now_df_api(code2, ptype, start, end).sort_index(ascending=True)
        # df1 = tdd.get_tdx_append_now_df(code2, ptype, start, end).sort_index(ascending=True)
        if not dtype == 'd':
            df1 = tdd.get_tdx_stock_period_to_type(df1, dtype).sort_index(ascending=True)
        asset1 = df1.loc[asset.index, vtype]
        startv = asset1[:1]
        # asset_v = asset[:1]
        # print(startv, asset_v)
        asset1 = asset1.apply(lambda x: round(x / asset1[:1], 2))
        # print(asset1[:4])

    # Plot price against time
    fig = plt.figure(figsize=(16, 10))
    # fig = plt.figure(figsize=(16, 10), dpi=72)
    # fig.autofmt_xdate()  # (no effect)
    # plt.subplots_adjust(bottom=0.1, right=0.8, top=0.9)
    plt.subplots_adjust(left=0.05, bottom=0.08, right=0.95, top=0.95,
                        wspace=0.15, hspace=0.25)
    # fig.set_size_inches(18.5, 10.5)
    ax1 = fig.add_subplot(321)
    # asset = asset.apply(lambda x: round(x / asset[:1], 2))
    ax1.plot(asset)
    # ax1.plot(asset1, '-r', linewidth=2)
    ticks = ax1.get_xticks()
    # start, end = ax1.get_xlim()
    # (ticks[:-1] if len(asset) > end else np.append(ticks[:-1], len(asset) - 1))
    ax1.set_xticklabels([dates[i] for i in (np.append(ticks[:-1], len(asset) - 1))],
                        rotation=15)  # label x-axis with dates

    # Fit a linear model
    X = np.arange(len(asset))
    x = sm.add_constant(X)
    model = regression.linear_model.OLS(asset, x).fit()
    a = model.params[0]
    b = model.params[1]
    # log.info("a:%s b:%s" % (a, b))
    log.info("X:%s a:%s b:%s" % (len(asset), a, b))
    Y_hat = X * b + a

    # Actual minus fitted: the extreme residuals define the value band.
    # Shift down:
    i = (asset.values.T - Y_hat).argmin()
    c_low = X[i] * b + a - asset.values[i]
    Y_hatlow = X * b + a - c_low
    # Shift up:
    i = (asset.values.T - Y_hat).argmax()
    c_high = X[i] * b + a - asset.values[i]
    Y_hathigh = X * b + a - c_high
    plt.plot(X, Y_hat, 'k', alpha=0.9)
    plt.plot(X, Y_hatlow, 'r', alpha=0.9)
    plt.plot(X, Y_hathigh, 'r', alpha=0.9)
    # plt.xlabel('Date', fontsize=12)
    plt.ylabel('Price', fontsize=12)
    plt.title(code + " | " + str(dates[-1])[:11], fontsize=14)
    plt.legend([asset.iat[-1]], fontsize=12, loc=4)
    plt.grid(True)
    # plt.legend([code])
    # plt.legend([code, 'Value center line', 'Value interval line'])
    scale = 1.1
    zp = zoompan.ZoomPan()
    figZoom = zp.zoom_factory(ax1, base_scale=scale)
    figPan = zp.pan_factory(ax1)

    ax2 = fig.add_subplot(323)
    # ax2.plot(asset)
    # ticks = ax2.get_xticks()
    ax2.set_xticklabels([dates[i] for i in (np.append(ticks[:-1], len(asset) - 1))],
                        rotation=15)
    # plt.plot(X, Y_hat, 'k', alpha=0.9)
    n = 5
    d = (-c_high + c_low) / n
    c = c_high
    while c <= c_low:
        Y = X * b + a - c
        plt.plot(X, Y, 'r', alpha=0.9)
        c = c + d
    # asset = asset.apply(lambda x: round(x / asset[:1], 2))
    ax2.plot(asset)
    # ax2.plot(asset1, '-r', linewidth=2)
    # plt.xlabel('Date', fontsize=12)
    plt.ylabel('Price', fontsize=12)
    plt.grid(True)
    # plt.title(code, fontsize=14)
    # plt.legend([code])

    # Plot the distance between price and the fitted center line (Y - Y_hat)
    # in its own panel; split the band evenly -- above 0 is overvalued,
    # below 0 undervalued, 0 is the value center line.
    ax3 = fig.add_subplot(322)
    # distance = (asset.values.T - Y_hat)
    distance = (asset.values.T - Y_hat)[0]
    if code.startswith('999') or code.startswith('399'):
        ax3.plot(asset)
        plt.plot(distance)
        ticks = ax3.get_xticks()
        ax3.set_xticklabels([dates[i] for i in (np.append(ticks[:-1], len(asset) - 1))],
                            rotation=15)
        n = 5
        d = (-c_high + c_low) / n
        c = c_high
        while c <= c_low:
            Y = X * b + a - c
            plt.plot(X, Y - Y_hat, 'r', alpha=0.9)
            c = c + d
        ax3.plot(asset)
        # plt.xlabel('Date', fontsize=12)
        plt.ylabel('Price-center price', fontsize=14)
        plt.grid(True)
    else:
        as3 = asset.apply(lambda x: round(x / asset[:1], 2))
        ax3.plot(as3)
        ax3.plot(asset1, '-r', linewidth=2)
        plt.grid(True)
    zp3 = zoompan.ZoomPan()
    figZoom = zp3.zoom_factory(ax3, base_scale=scale)
    figPan = zp3.pan_factory(ax3)
    # plt.title(code, fontsize=14)
    # plt.legend([code])

    # Histogram of price frequencies within each region of the band; the
    # band is split into 100 bins for finer resolution.
    ax4 = fig.add_subplot(325)
    log.info("assert:len:%s %s" % (len(asset.values.T - Y_hat),
                                   (asset.values.T - Y_hat)[0]))
    # distance = map(lambda x: int(x), (asset.values.T - Y_hat) / Y_hat * 100)
    # now_distanse = int((asset.iat[-1] - Y_hat[-1]) / Y_hat[-1] * 100)
    # log.debug("dis:%s now:%s" % (distance[:2], now_distanse))
    # log.debug("now_distanse:%s" % now_distanse)
    distance = (asset.values.T - Y_hat)
    now_distanse = asset.iat[-1] - Y_hat[-1]
    # distance = (asset.values.T - Y_hat)[0]
    pd.Series(distance).plot(kind='hist', stacked=True, bins=100)
    plt.axvline(now_distanse, label="1", color='red')
    # plt.axhline(now_distanse, label="1", color='red')
    # plt.axvline(asset.iat[0], label="1", color='red', linestyle="--")
    plt.xlabel('Undervalue ------------------------------------------> Overvalue',
               fontsize=12)
    plt.ylabel('Frequency', fontsize=14)
    # plt.title('Undervalue & Overvalue Statistical Chart', fontsize=14)
    plt.legend([code, asset.iat[-1], str(dates[-1])[5:11]], fontsize=12)
    plt.grid(True)

    # plt.show()
    # import os
    # print(os.path.abspath(os.path.curdir))

    ax5 = fig.add_subplot(326)
    log.info("assert:len:%s %s" % (len(asset.values.T - Y_hat),
                                   (asset.values.T - Y_hat)[0]))
    # distance = map(lambda x: int(x), (asset.values.T - Y_hat) / Y_hat * 100)
    distance = (asset.values.T - Y_hat) / Y_hat * 100
    now_distanse = ((asset.iat[-1] - Y_hat[-1]) / Y_hat[-1] * 100)
    log.debug("dis:%s now:%s" % (distance[:2], now_distanse))
    log.debug("now_distanse:%s" % now_distanse)
    # n, bins = np.histogram(distance, 50)
    # print(n, bins[:2])
    pd.Series(distance).plot(kind='hist', stacked=True, bins=100)
    plt.axvline(now_distanse, label="1", color='red')
    # plt.axhline(now_distanse, label="1", color='red')
    # plt.axvline(asset.iat[0], label="1", color='red', linestyle="--")
    plt.xlabel('Undervalue ------------------------------------------> Overvalue',
               fontsize=14)
    plt.ylabel('Frequency', fontsize=12)
    # plt.title('Undervalue & Overvalue Statistical Chart', fontsize=14)
    plt.legend([code, asset.iat[-1]], fontsize=12)
    plt.grid(True)

    ax6 = fig.add_subplot(324)
    h = df.loc[:, ['open', 'close', 'high', 'low']]
    highp = h['high'].values
    lowp = h['low'].values
    openp = h['open'].values
    closep = h['close'].values
    lr = LinearRegression()
    x = np.atleast_2d(np.linspace(0, len(closep), len(closep))).T
    lr.fit(x, closep)
    xt = np.atleast_2d(np.linspace(0, len(closep) + 200, len(closep) + 200)).T
    yt = lr.predict(xt)
    bV = []
    bP = []
    for i in range(1, len(highp) - 1):
        if (highp[i] <= highp[i - 1] and highp[i] < highp[i + 1]
                and lowp[i] <= lowp[i - 1] and lowp[i] < lowp[i + 1]):
            bV.append(lowp[i])
            bP.append(i)
    d, p = LIS(bV)
    idx = []
    for i in range(len(p)):
        idx.append(bP[p[i]])
    lr = LinearRegression()
    X = np.atleast_2d(np.array(idx)).T
    Y = np.array(d)
    lr.fit(X, Y)
    estV = lr.predict(xt)
    ax6.plot(closep, linewidth=2)
    ax6.plot(idx, d, 'ko')
    ax6.plot(xt, estV, '-r', linewidth=3)
    ax6.plot(xt, yt, '-g', linewidth=3)
    plt.grid(True)
    # plt.tight_layout()
    zp2 = zoompan.ZoomPan()
    figZoom = zp2.zoom_factory(ax6, base_scale=scale)
    figPan = zp2.pan_factory(ax6)
    # plt.ion()
    plt.show(block=False)