def test_run(self):
    # Simulate geometric-Brownian-motion paths at low and high volatility
    # and plot the first 10 paths of each run for visual comparison.
    plt.style.use('seaborn')  # NOTE(review): 'seaborn' style name is deprecated in matplotlib >= 3.6
    mpl.rcParams['font.family'] = 'serif'
    # build the market environment
    me_gbm = MarketEnvironment('me_gbm', dt.datetime(2020, 1, 1))
    me_gbm.add_constant('initial_value', 36.0)
    me_gbm.add_constant('volatility', 0.2)
    me_gbm.add_constant('final_date', dt.datetime(2020, 12, 31))
    me_gbm.add_constant('currency', 'EUR')
    me_gbm.add_constant('frequency', 'M')
    me_gbm.add_constant('paths', 1000)
    csr = ConstantShortRate('csr', 0.06)
    me_gbm.add_curve('discount_curve', csr)
    # create the geometric-Brownian-motion simulation object
    gbm = GeometricBrownianMotion('gbm', me_gbm)
    gbm.generate_time_grid()
    print('时间节点:{0};'.format(gbm.time_grid))
    paths_1 = gbm.get_instrument_values()
    print('paths_1: {0};'.format(paths_1.round(3)))
    # re-simulate with higher volatility
    gbm.update(volatility=0.5)
    paths_2 = gbm.get_instrument_values()
    # visualize the results
    plt.figure(figsize=(10, 6))
    p1 = plt.plot(gbm.time_grid, paths_1[:, :10], 'b')
    p2 = plt.plot(gbm.time_grid, paths_2[:, :10], 'r-')
    legend1 = plt.legend([p1[0], p2[0]],
                         ['low volatility', 'high volatility'], loc=2)
    plt.gca().add_artist(legend1)
    plt.xticks(rotation=30)
    plt.show()
def test_run(self):
    # Simulate jump-diffusion paths at low and high jump intensity and plot
    # the first 10 paths of each run.
    # build the geometric-Brownian-motion market environment
    me_gbm = MarketEnvironment('me_gbm', dt.datetime(2020, 1, 1))
    me_gbm.add_constant('initial_value', 36.0)
    me_gbm.add_constant('volatility', 0.2)
    me_gbm.add_constant('final_date', dt.datetime(2020, 12, 31))
    me_gbm.add_constant('currency', 'EUR')
    me_gbm.add_constant('frequency', 'M')
    me_gbm.add_constant('paths', 1000)
    csr = ConstantShortRate('csr', 0.06)
    me_gbm.add_curve('discount_curve', csr)
    # create the GBM simulation object (used below for its time grid)
    gbm = GeometricBrownianMotion('gbm', me_gbm)
    gbm.generate_time_grid()
    # build the jump-diffusion market environment on top of the GBM one;
    # lambda/mu/delta are the jump parameters consumed by JumpDiffusion
    # (exact semantics per that class -- TODO confirm)
    me_jd = MarketEnvironment('me_jd', dt.datetime(2020, 1, 1))
    me_jd.add_constant('lambda', 0.3)
    me_jd.add_constant('mu', -0.75)
    me_jd.add_constant('delta', 0.1)
    me_jd.add_environment(me_gbm)
    # create the jump-diffusion simulation object
    jd = JumpDiffusion('jd', me_jd)
    paths_3 = jd.get_instrument_values()
    jd.update(lamb=0.9)  # raise the jump intensity
    paths_4 = jd.get_instrument_values()
    # plot both path sets
    plt.figure(figsize=(10, 6))
    p1 = plt.plot(gbm.time_grid, paths_3[:, :10], 'b')
    p2 = plt.plot(gbm.time_grid, paths_4[:, :10], 'r-')
    lengend1 = plt.legend([p1[0], p2[0]],
                          ['low intensity', 'high intensity'], loc=3)
    plt.gca().add_artist(lengend1)
    plt.xticks(rotation=30)
    plt.show()
def test_dip(self):
    """Plot dip angle versus along-ground distance and save the figure."""
    ground_xs = arange(0, 425)
    plt.plot(ground_xs, self.fm.get_dip(ground_xs))
    plt.grid('on')
    ax = plt.gca()
    ax.set_xticks(self.fm.Y_PC)
    plt.ylim([0, 30])
    ax.invert_yaxis()
    plt.savefig(join(self.outs_dir, '~y_fc_dips.png'))
    plt.close()
def plot_mat(self, mat, fn):
    """Render a sparse matrix as a dense image and save it under *fn*."""
    dense = asarray(mat.todense())
    plt.matshow(dense)
    plt.axis('equal')
    n_rows, n_cols = mat.shape
    ax = plt.gca()
    ax.set_yticks(range(n_rows))
    ax.set_xticks(range(n_cols))
    plt.grid('on')
    plt.colorbar()
    plt.savefig(join(self.outs_dir, fn))
    plt.close()
def test_dip(self):
    """Save a plot of fault dip against ground distance."""
    xs = arange(0, 425)
    dip_vals = self.fm.get_dip(xs)
    plt.plot(xs, dip_vals)
    plt.grid('on')
    plt.gca().set_xticks(self.fm.Y_PC)
    plt.ylim([0, 30])
    plt.gca().invert_yaxis()
    out_png = join(self.outs_dir, '~y_fc_dips.png')
    plt.savefig(out_png)
    plt.close()
def plot_mat(self, mat, fn):
    """Visualise a sparse matrix with matshow and write the image to *fn*."""
    plt.matshow(asarray(mat.todense()))
    plt.axis('equal')
    shape = mat.shape
    plt.gca().set_yticks(list(range(shape[0])))
    plt.gca().set_xticks(list(range(shape[1])))
    plt.grid('on')
    plt.colorbar()
    plt.savefig(join(self.outs_dir, fn))
    plt.close()
def biaozhukedu(dfc, weibiao):
    # Annotate the current plot at index *weibiao* (and, when it is not the
    # last index, also at the final index of *dfc*): draws a dashed guide
    # line, a label for the index, and one value label per column.
    # dfc: pandas object whose rows are plotted; weibiao: index label to mark.
    if weibiao == dfc.index.max():
        kedus = [dfc.loc[weibiao]]
    else:
        kedus = [dfc.loc[weibiao], dfc.loc[dfc.index.max()]]
    # print(type(kedus[0]))
    for ii in range(len(kedus)):
        kedu = kedus[ii]
        # with more than 12 index entries the label itself is the x position,
        # otherwise its ordinal position in the index is used
        if (len(dfc.index)) > 12:
            idx = kedu.name
        else:
            idx = list(dfc.index).index(kedu.name)
        if not np.isnan(kedu.iloc[0]):
            # dashed guide line from the axis up to the first value
            plt.plot([idx, idx], [0, kedu.iloc[0]], 'c--')
            plt.annotate(str(kedu.name), xy=(idx, 0), xycoords='data',
                         xytext=(-20, -20), textcoords='offset points',
                         color='r',
                         arrowprops=dict(arrowstyle="->",
                                         connectionstyle="arc3,rad=0"))
        for i in range(len(kedu)):
            if np.isnan(kedu.iloc[i]):
                # skip missing values
                continue
            plt.scatter([idx, ], [kedu.iloc[i]], 50, color='Wheat')
            # ywananchor: module-level threshold above which values are shown
            # in units of 10,000 -- assumed defined elsewhere in this file;
            # TODO confirm
            if kedu.map(lambda x: abs(x)).max() >= ywananchor:
                kedubiaozhi = "%.1f万" % (kedu.iloc[i] / 10000)
                # y-axis major tick labels also rendered in units of 10,000
                plt.gca().yaxis.set_major_formatter(
                    FuncFormatter(lambda x, pos: "%d万" % int(x / 10000)))
            else:
                kedubiaozhi = "%d" % kedu.iloc[i]
            fontsize = 8
            # alternate the label offset direction to reduce overlap
            if (i % 2) == 0:
                zhengfu = -1
            else:
                zhengfu = 0.4
            plt.annotate(kedubiaozhi, xy=(idx, kedu.iloc[i]), xycoords='data',
                         xytext=(len(kedubiaozhi) * fontsize * zhengfu,
                                 int(len(kedubiaozhi) * fontsize * (-1) * zhengfu / 2)),
                         textcoords='offset points', fontsize=fontsize,
                         arrowprops=dict(arrowstyle="->",
                                         connectionstyle="arc3,rad=.2",
                                         color='Purple'))
def drawnow(self):
    """Redraw the current image together with its polygon overlays."""
    plt.cla()
    imshow(self.img)
    ax = plt.gca()
    for patch in self.poly_draw:
        ax.add_patch(patch)
    ax.invert_yaxis()
    plt.draw()
def __init__(self, ref_params, save_state_freq=500, overwrite_state=True, plot=False):
    """
    :param ref_params: instance of refinement Parameters (LMP in code below)
    :param save_state_freq: how often to save all models (will be overwritten each time)
    :param overwrite_state: whether each saved state replaces the previous one
    :param plot: enable live plotting (effective only on MPI rank 0)
    """
    n_params = len(ref_params)
    # one flag per parameter: True when the parameter is refined (not fixed)
    self.vary = np.zeros(n_params, dtype=bool)
    for param in ref_params.values():
        self.vary[param.xpos] = not param.fix
    self.x0 = np.ones(n_params)
    self.g = None
    self.ref_params = ref_params
    self.iternum = 0
    self.all_times = []
    self.save_state_freq = save_state_freq
    self.overwrite_state = overwrite_state
    # median prediction offsets (a new value is appended every time
    # write_output_files is called)
    self.med_offsets = []
    self.med_iternums = []
    self.plot = plot and COMM.rank == 0
    if self.plot:
        self.fig = plt.figure()
        self.ax = plt.gca()
        plt.draw()
        plt.pause(0.1)
def plot_2d_mixing_space(self, features, hold=False):
    '''
    Draws a 2D (triangular) mixing space.
    '''
    codes = [VectorPath.MOVETO, VectorPath.LINETO,
             VectorPath.LINETO, VectorPath.CLOSEPOLY]
    verts = features[..., 0:2].tolist()
    verts.append((0, 0))  # dummy vertex required by CLOSEPOLY
    triangle = patches.PathPatch(VectorPath(verts, codes),
                                 facecolor='black', alpha=0.3, lw=0)
    plt.gca().add_patch(triangle)
    if not hold:
        plt.show()
def cumulative_freq_plot(rast, band=0, mask=None, bins=100, xlim=None, nodata=-9999):
    '''
    Plots an empirical cumulative frequency curve for the input raster array
    in a given band.

    :param rast: raster array to summarize
    :param band: band number (used only in the plot title)
    :param mask: optional mask applied through binary_mask()
    :param bins: number of histogram bins
    :param xlim: optional (min, max) x-axis limits
    :param nodata: NoData value; when not None the array is filtered
        through subarray()
    :return: the (possibly masked/filtered) array that was plotted
    '''
    if mask is not None:
        arr = binary_mask(rast, mask)
    else:
        arr = rast.copy()
    if nodata is not None:
        # NOTE(review): subarray() is not passed `nodata`; presumably it
        # strips a default NoData value -- confirm against its definition
        arr = subarray(arr)
    values, base = np.histogram(arr, bins=bins)
    cumulative = np.cumsum(values)  # evaluate the cumulative distribution
    plt.plot(base[:-1], cumulative, c='blue')  # plot the cumulative function
    # BUG FIX: pyplot has no set_title(); the original raised AttributeError
    plt.title('Empirical Cumulative Distribution: Band %d' % band)
    if xlim is not None:
        plt.gca().set_xlim(xlim)
    plt.show()
    return arr
def plot(filename='area_and_overlap.csv'):
    """
    Quick and dirty plotting of the data output by area_and_overlap_csv.
    Would likely want to do this more interactively, so consider this a
    record of plotting done and a starting point for more plotting.
    You will likely want to invoke ipython with the --matplotlib switch and
    the backend of your choice (we used tk):  ipython --matplotlib tk
    """
    from matplotlib import pyplot
    from pylab import plt, show
    # NOTE: Python 2 code ('rb' mode for csv and the print statement below)
    in_file = csv.reader(open(filename, 'rb'))
    areas = []
    overlaps = []
    for row in in_file:
        try:
            areas.append(float(row[0]))
            overlaps.append(float(row[1]))
        except Exception:
            # non-numeric rows (e.g. the header) are echoed and skipped
            print row[0], row[1]
    fig = pyplot.figure()
    ax = plt.gca()
    ax.set_ylabel('parcel area (sq ft, log)')
    ax.set_yscale('log')
    ax.set_xlabel('% of parcel that overlaps with building polygons')
    # translucent markers so dense clusters read as darker regions
    ax.plot(overlaps, areas, 'o', c='blue', alpha=0.05, markeredgecolor='none')
    show()
def plotData(data, label_x, label_y, label_pos, label_neg, axes=None, is_show=True):
    """Scatter-plot positive and negative samples from *data*.

    :param data: array whose third column holds the 0/1 class label
    :param label_x: x-axis label
    :param label_y: y-axis label
    :param label_pos: legend label for the positive class
    :param label_neg: legend label for the negative class
    :param axes: matplotlib axes to draw on (defaults to the current axes)
    :param is_show: when True, call show() after drawing
    """
    # boolean masks: rows with label 0 (negative) and label 1 (positive)
    neg = data[:, 2] == 0
    pos = data[:, 2] == 1
    if axes is None:  # fixed: compare with `is None`, not `== None`
        axes = plt.gca()
    axes.scatter(data[pos][:, 0], data[pos][:, 1], marker='+', c='k', s=8,
                 linewidth=6, label=label_pos)
    axes.scatter(data[neg][:, 0], data[neg][:, 1], c='y', s=8, label=label_neg)
    axes.set_xlabel(label_x)
    axes.set_ylabel(label_y)
    axes.legend(frameon=True, fancybox=True)
    # axes.legend(loc='center left', bbox_to_anchor=(0.2, 1.12), ncol=3)
    if is_show:
        show()
def axis_ij(g=None):
    """Flip the y-axis of *g* (default: current axes) so it increases downward."""
    g = _plt.gca() if g is None else g
    lo, hi = g.get_ylim()
    if hi > lo:
        g.set_ylim(hi, lo)
def axis_ij(g=None):
    """Invert the y-limits of axes *g* so row index 0 sits at the top."""
    if g is None:
        g = _plt.gca()
    y0, y1 = g.get_ylim()
    if y1 > y0:
        g.set_ylim(y1, y0)
    # otherwise the axis is already inverted: nothing to do
def plot_data(data, label_x, label_y):
    """Scatter columns 0 and 1 of *data* against column 2 and show the figure."""
    ax = plt.gca()
    xs = data[:, 2]
    ax.scatter(xs, data[:, 0], marker='+', c='k', s=8)
    ax.scatter(xs, data[:, 1], c='y', s=8)
    ax.set_xlabel(label_x)
    ax.set_ylabel(label_y)
    ax.legend(frameon=True, fancybox=True)
    show()
def show_data(list_dat, num=4):
    # Display *num* randomly chosen samples: the image with its bounding
    # boxes drawn, then each box's annotation mask in its own figure.
    # Python 2 code (print statement below).
    from pylab import plt
    for dat in np.random.choice(list_dat, num):
        print dat
        im = cv2.imread(dat['filepath'])[:, :, ::-1]  # BGR -> RGB
        plt.figure(1)
        plt.imshow(im)
        for bbox in dat['bboxes']:
            plt.gca().add_patch(plt.Rectangle((bbox['x1'], bbox['y1']),
                                              bbox['x2'] - bbox['x1'],
                                              bbox['y2'] - bbox['y1'],
                                              fill=False, edgecolor='red',
                                              linewidth=1))
        for idx, bbox in enumerate(dat['bboxes']):
            ann = np.array(Image.open(bbox['ann_path']))
            if len(ann.shape) == 3:
                ann = ann[:, :, 0]  # Make sure ann is a two dimensional np array.
            plt.figure(11 + idx)
            plt.imshow(ann)
        plt.show()
def _plot_base(dep, val, deplim_small, xlim_small, xlabel):
    """Two-panel depth profile: full range (left) and zoomed view (right)."""
    for panel, zoomed in ((1, False), (2, True)):
        plt.subplot(1, 2, panel)
        plt.plot(val, dep)
        plt.gca().invert_yaxis()
        plt.grid('on')
        if zoomed:
            plt.ylim(deplim_small)
            plt.xlim(xlim_small)
        else:
            plt.ylabel('depth/km')
        plt.xlabel(xlabel)
        locs, labels = plt.xticks()
        plt.setp(labels, rotation=-45)
def _plot_base(dep, val, deplim_small, xlim_small, xlabel):
    """Plot *val* against depth twice: full view and a zoom on the region of interest."""
    # left panel: full depth range
    plt.subplot(1, 2, 1)
    plt.plot(val, dep)
    plt.gca().invert_yaxis()
    plt.grid('on')
    plt.ylabel('depth/km')
    plt.xlabel(xlabel)
    _, tick_labels = plt.xticks()
    plt.setp(tick_labels, rotation=-45)
    # right panel: zoomed-in limits
    plt.subplot(1, 2, 2)
    plt.plot(val, dep)
    plt.gca().invert_yaxis()
    plt.grid('on')
    plt.ylim(deplim_small)
    plt.xlim(xlim_small)
    plt.xlabel(xlabel)
    _, tick_labels = plt.xticks()
    plt.setp(tick_labels, rotation=-45)
def show_prediction_result(image, label_image, clf):
    # Segment candidate digit regions from *label_image*, classify each with
    # *clf*, and overlay boxes plus predicted labels on *image*.
    # :param image: 2-D source image array
    # :param label_image: labelled segmentation of `image` (regionprops input)
    # :param clf: fitted classifier with a predict() method
    # :return: (candidates, predictions) for the accepted regions
    size = (8, 8)  # classifier input size
    plt.figure(figsize=(15, 10))
    plt.imshow(image, cmap='gray_r')
    candidates = []
    predictions = []
    for region in regionprops(label_image):
        # skip small images
        # if region.area < 100:
        #     continue
        # draw rectangle around segmented coins
        minr, minc, maxr, maxc = region.bbox
        # make regions square, padded by 3 px on each side
        maxwidth = np.max([maxr - minr, maxc - minc])
        minr, maxr = int(0.5 * ((maxr + minr) - maxwidth)) - 3, int(0.5 * ((maxr + minr) + maxwidth)) + 3
        minc, maxc = int(0.5 * ((maxc + minc) - maxwidth)) - 3, int(0.5 * ((maxc + minc) + maxwidth)) + 3
        rect = mpatches.Rectangle((minc, minr), maxc - minc, maxr - minr,
                                  fill=False, edgecolor='red', linewidth=2,
                                  alpha=0.2)
        plt.gca().add_patch(rect)
        # predict digit
        candidate = image[minr:maxr, minc:maxc]
        # NOTE(review): scipy's imresize was removed in scipy >= 1.3
        candidate = np.array(imresize(candidate, size), dtype=float)
        # invert
        # candidate = np.max(candidate) - candidate
        # print im
        # rescale to 16 in integer
        candidate = (candidate - np.min(candidate))
        if np.max(candidate) == 0:
            # blank region: nothing to classify
            continue
        candidate /= np.max(candidate)
        candidate[candidate < 0.2] = 0.0  # suppress faint background pixels
        candidate *= 16
        candidate = np.array(candidate, dtype=int)
        prediction = clf.predict(candidate.reshape(-1))
        candidates.append(candidate)
        predictions.append(prediction)
        plt.text(minc - 10, minr - 10, "{}".format(prediction), fontsize=50)
    plt.xticks([], [])
    plt.yticks([], [])
    plt.tight_layout()
    plt.show()
    return candidates, predictions
def draw_satTrail_multicolor(lons, lats, colormap, Trail_Width=1):
    '''
    Draw a satellite ground track whose colour grades along *colormap*.
    (Python 2 code: uses xrange.)
    '''
    # t drives the colour gradient: one value per track sample
    t = np.linspace(0, len(lons), len(lons))
    lons_tmp = []
    lats_tmp = []
    lon_old = lons[0]
    ti = 0
    for i in xrange(len(lons)):
        if abs(lons[i] - lon_old) >= 180.:
            # the track crossed the +/-180 longitude seam: flush the
            # accumulated segment and start a new one
            points = np.array([lons_tmp, lats_tmp]).T.reshape(-1, 1, 2)
            segments = np.concatenate([points[:-1], points[1:]], axis=1)
            lc = LineCollection(
                segments,
                cmap=plt.get_cmap(colormap),  # YlGnBu
                norm=plt.Normalize(0, len(lons)))
            lc.set_array(t[ti:i])
            lc.set_linewidth(Trail_Width)
            plt.gca().add_collection(lc)
            # plt.plot(lons_tmp, lats_tmp, '-', linewidth=Trail_Width, c=color)
            lons_tmp = []
            lats_tmp = []
            ti = i
        lon_old = lons[i]
        lons_tmp.append(lons[i])
        lats_tmp.append(lats[i])
    # draw the final remaining segment
    # plt.plot(lons_tmp, lats_tmp, '-', linewidth=Trail_Width, c=color)
    points = np.array([lons_tmp, lats_tmp]).T.reshape(-1, 1, 2)
    segments = np.concatenate([points[:-1], points[1:]], axis=1)
    lc = LineCollection(
        segments,
        cmap=plt.get_cmap(colormap),  # YlGnBu
        norm=plt.Normalize(0, len(lons)))
    # NOTE(review): t[ti:i] reuses the loop's final i; t[ti:] may be the
    # intended slice for the tail segment -- confirm
    lc.set_array(t[ti:i])
    lc.set_linewidth(Trail_Width)
    plt.gca().add_collection(lc)
def show_data(list_dat, num=4):
    # Display *num* random samples: each image with its bounding boxes, then
    # every box's annotation mask (read as grayscale) in its own figure.
    # Python 2 code (print statement below).
    from pylab import plt
    for dat in np.random.choice(list_dat, num):
        print dat
        im = cv2.imread(dat['filepath'])[:, :, ::-1]  # BGR -> RGB
        plt.figure(1)
        plt.imshow(im)
        for bbox in dat['bboxes']:
            plt.gca().add_patch(
                plt.Rectangle((bbox['x1'], bbox['y1']),
                              bbox['x2'] - bbox['x1'],
                              bbox['y2'] - bbox['y1'],
                              fill=False, edgecolor='red', linewidth=1))
        for idx, bbox in enumerate(dat['bboxes']):
            ann = cv2.imread(bbox['ann_path'], cv2.IMREAD_GRAYSCALE)
            plt.figure(11 + idx)
            plt.imshow(ann)
        plt.show()
def draw_spectrum(data_list):
    """Plot the Kyoto wind series, its power spectrum, and a log-log fit used
    to estimate the fractal dimension; saves the figure to ../pics/.

    :param data_list: hourly wind observations (1-D sequence)
    """
    T = 3600  # sampling period in seconds (hourly data)
    amp_spec, power_spec, freq = spectrum(data_list, T)
    # BUG FIX: the literal was missing its f-prefix, so the raw placeholder
    # text was printed instead of the value
    print(f'Max amp in spectrum: {np.max(amp_spec)}')
    plt.figure(figsize=(18, 5))
    plt.subplot(131)
    x = list(range(len(data_list)))
    y = data_list
    plt.title("Observation wind data of Kyoto")
    plt.xlabel('Hours')
    plt.ylabel('Observation wind data of Kyoto')
    plt.plot(x, y, color='green')
    data_len = len(x)
    plt.subplot(132)
    plt.title("Power Spectrum of Wind ")
    # keep only the upper half of the (symmetric) spectrum
    x = freq[int(data_len / 2):]
    y = power_spec[int(data_len / 2):]
    # set 0 to 0Hz (DC component)
    y[0] = 0
    plt.xlabel('Frequency (Hz)')
    plt.ylabel('Intensity')
    plt.plot(x, y, color='orange')
    ax = plt.gca()
    x = x[1:]  # drop the DC bin before the log-log fit
    y = y[1:]
    ax.xaxis.set_major_formatter(mtick.FormatStrFormatter('%.0e'))
    # linear fit in log-log space; slope -beta is the spectral exponent
    coeffs = np.polyfit(np.log(x), np.log(y), 1)
    beta = -coeffs[0]
    dimension = 1 + (3 - beta) / 2
    print(beta)
    print("The fractal dimension is", dimension)
    plt.subplot(133)
    plt.title("the Curve of log(power-spectrum) and log(frequency)")
    plt.scatter(np.log(x), np.log(y), marker='o', s=10, c=list(range(len(x))))
    # plt.plot(np.log(x), np.log(y), 'o', mfc='none')
    plt.plot(np.log(x), np.polyval(coeffs, np.log(x)))
    plt.xlabel('log freq')
    plt.ylabel('log intensity')
    plt.savefig("../pics/kyoto_wind.png")
    plt.show()
def plot(self):
    """Plot the six choice-transition series and save the figure."""
    fig = plt.figure(figsize=self.figsize)
    fig.patch.set_facecolor('white')
    fig.patch.set_alpha(0)
    ax = plt.gca()
    ax.set_title("Choices \n", fontsize=self.title_size)
    # (label, linestyle, marker, colour) per series, in the order of self.Ys
    series_spec = [
        ("Choice 0 -> 1", "-", 4, "red"),
        ("Choice 1 -> 0", "-", 5, "red"),
        ("Choice 1 -> 2", "-", 4, "blue"),
        ("Choice 2 -> 1", "-", 5, "blue"),
        ("Choice 2 -> 0", "-", 4, "green"),
        ("Choice 0 -> 2", "-", 5, "green"),
    ]
    for y, (label, ls, marker, color) in zip(self.Ys, series_spec):
        ax.plot(self.X, y, label=label, linewidth=self.line_width,
                color=color, linestyle=ls, marker=marker)
    ax.legend(bbox_to_anchor=(0.95, 1.19), loc=2,
              fontsize=self.legend_font_size, frameon=False)
    ax.tick_params(axis='both', which='major', labelsize=self.label_value_size)
    ax.set_xlabel("t", fontsize=self.label_font_size)
    ax.set_ylabel("n", fontsize=self.label_font_size)
    ax.spines['right'].set_color('none')
    ax.yaxis.set_ticks_position('left')
    ax.xaxis.set_ticks_position('bottom')
    ax.spines['top'].set_color('none')
    plt.savefig(self.fig_name)
    plt.close()
def plot(self):
    """Render the reward distribution as a grey bar chart and save it."""
    figure = plt.figure(figsize=self.figsize)
    figure.patch.set_facecolor('white')
    # figure.patch.set_alpha(0)
    axis = plt.gca()
    axis.set_title("Rewards distribution\n", fontsize=self.title_size)
    axis.bar(self.X, self.Y, self.bar_width, tick_label=self.X, color="grey")
    axis.tick_params(axis='both', which='major',
                     labelsize=self.label_value_size)
    plt.savefig(self.fig_name)
    plt.close()
def plot(self):
    """Plot the three market-attendance series in black with distinct dashes."""
    fig = plt.figure(figsize=self.figsize)
    fig.patch.set_facecolor('white')
    # fig.patch.set_alpha(0)
    ax = plt.gca()
    ax.set_title("Markets attendance \n", fontsize=self.title_size)
    # (label, linestyle) per series, in the order of self.Ys
    series_spec = (
        ("Market 0 -> 1 / 1 -> 0", "-"),
        ("Market 1 -> 2 / 2 -> 1", "--"),
        ("Market 2 -> 0 / 0 -> 2", ":"),
    )
    for y, (label, dash) in zip(self.Ys, series_spec):
        ax.plot(self.X, y, label=label, linewidth=self.line_width,
                color="black", linestyle=dash)
    ax.legend(bbox_to_anchor=(0.8, 1.19), loc=2,
              fontsize=self.legend_font_size, frameon=False)
    ax.tick_params(axis='both', which='major', labelsize=self.label_value_size)
    ax.set_xlabel("t", fontsize=self.label_font_size)
    ax.set_ylabel("n", fontsize=self.label_font_size)
    ax.spines['right'].set_color('none')
    ax.yaxis.set_ticks_position('left')
    ax.xaxis.set_ticks_position('bottom')
    ax.spines['top'].set_color('none')
    plt.savefig(self.fig_name)
    plt.close()
def plot_fault_framework(fault_framework):
    """Sketch the fault framework: depth profile with per-node dip labels."""
    fm = fault_framework
    plt.plot(fm.Y_PC, fm.DEP, '-o')
    plt.axis('equal')
    plt.axhline(0, color='black')  # ground surface
    ax = plt.gca()
    ax.set_yticks(fm.DEP)
    ax.set_xticks(fm.Y_PC)
    plt.grid('on')
    plt.xlabel('From trench to continent(km)')
    plt.ylabel('depth (km)')
    for x_pc, depth, dip_deg in zip(fm.Y_PC, fm.DEP, fm.DIP_D):
        plt.text(x_pc, depth, 'dip = %.1f' % dip_deg)
    ax.invert_yaxis()
def plot_fault_framework(fault_framework):
    """Plot the fault geometry and annotate each node with its dip angle."""
    fm = fault_framework
    plt.plot(fm.Y_PC, fm.DEP, '-o')
    plt.axis('equal')
    plt.axhline(0, color='black')
    plt.gca().set_yticks(fm.DEP)
    plt.gca().set_xticks(fm.Y_PC)
    plt.grid('on')
    plt.xlabel('From trench to continent(km)')
    plt.ylabel('depth (km)')
    dip_labels = ['dip = %.1f' % d for d in fm.DIP_D]
    for xi, yi, txt in zip(fm.Y_PC, fm.DEP, dip_labels):
        plt.text(xi, yi, txt)
    plt.gca().invert_yaxis()
def plotNet(net, colors, widths, options):
    """Draw every edge of *net* as a line collection with per-edge colour/width.

    Edges missing from *colors*/*widths* fall back to the defaults in *options*.
    """
    shapes = []
    edge_colors = []
    edge_widths = []
    for edge in net._edges:
        shapes.append(edge.getShape())
        # membership is tested on the raw id but lookup uses str(id),
        # exactly as in the original implementation
        if edge._id in colors:
            edge_colors.append(colors[str(edge._id)])
        else:
            edge_colors.append(options.defaultColor)
        if edge._id in widths:
            edge_widths.append(widths[str(edge._id)])
        else:
            edge_widths.append(options.defaultWidth)
    ax = plt.gca()
    ax.add_collection(LineCollection(shapes, linewidths=edge_widths,
                                     colors=edge_colors))
    ax.set_xmargin(0.1)
    ax.set_ymargin(0.1)
    ax.autoscale_view(True, True, True)
def plot_L_curve(
        files,
        nlin_pars=('log10_He_', 'log10_visM_', 'rake'),
        nlin_pars_ylabels=(r'$log_{10}(He)$', r'$log_{10}(visM)$', 'rake'),
):
    """Plot the L-curve (residual norm vs. roughening) plus one subplot per
    non-linear parameter.

    Fixed: mutable (list) default arguments replaced with tuples; repeated
    x-axis formatting hoisted into a local helper.

    :param files: result files read by collect_from_result_files()
    :param nlin_pars: result-file keys of the non-linear parameters
    :param nlin_pars_ylabels: y-axis labels matching nlin_pars
    """
    nreses = collect_from_result_files(files, 'residual_norm_weighted')
    nroughs = collect_from_result_files(files, 'roughening_norm')
    num_subplots = 1 + len(nlin_pars)
    x1 = amin(nreses)
    x2 = amax(nreses)
    dx = x2 - x1
    xlim = (x1 - dx * 0.02, x2 + dx * 0.2)
    xticks = range(int(x1), int(x2), 5)

    def _format_xaxis():
        # shared x-axis styling for every subplot
        plt.xlim(xlim)
        plt.gca().set_xticks(xticks)
        plt.gca().get_xaxis().set_major_formatter(
            matplotlib.ticker.ScalarFormatter())
        plt.xlabel('Residual Norm')
        plt.grid('on')

    plt.subplot(num_subplots, 1, 1)
    plt.loglog(nreses, nroughs, 'o-')
    _format_xaxis()
    plt.ylabel('roughening')
    for nth, (par, par_label) in enumerate(zip(nlin_pars, nlin_pars_ylabels),
                                           start=2):
        y = collect_from_result_files(files, par)
        plt.subplot(num_subplots, 1, nth)
        plt.semilogx(nreses, y, 'o-')
        _format_xaxis()
        plt.ylabel(par_label)
def test_dep(self):
    """Plot ground distance versus interface depth and save the figure."""
    ground_xs = arange(0, 425)
    plt.plot(ground_xs, self.fm.get_dep(ground_xs))
    ax = plt.gca()
    ax.set_yticks(self.fm.DEP)
    ax.set_xticks(self.fm.Y_PC)
    plt.grid('on')
    plt.title('Ground x versus depth')
    plt.xlabel('Ground X (km)')
    plt.ylabel('depth (km)')
    plt.axis('equal')
    ax.invert_yaxis()
    plt.savefig(join(self.outs_dir, '~Y_PC_vs_deps.png'))
    plt.close()
def test_dep(self):
    """Save a depth-profile plot of the fault model to the outputs directory."""
    xs = arange(0, 425)
    depths = self.fm.get_dep(xs)
    plt.plot(xs, depths)
    plt.gca().set_yticks(self.fm.DEP)
    plt.gca().set_xticks(self.fm.Y_PC)
    plt.grid('on')
    plt.title('Ground x versus depth')
    plt.xlabel('Ground X (km)')
    plt.ylabel('depth (km)')
    plt.axis('equal')
    plt.gca().invert_yaxis()
    out_png = join(self.outs_dir, '~Y_PC_vs_deps.png')
    plt.savefig(out_png)
    plt.close()
def plot_L_curve(files,
                 nlin_pars=('log10_He_', 'log10_visM_', 'rake'),
                 nlin_pars_ylabels=(r'$log_{10}(He)$',
                                    r'$log_{10}(visM)$',
                                    'rake'),
                 ):
    """L-curve plot: residual norm vs. roughening, plus one panel per
    non-linear parameter in *nlin_pars*.

    Fixed: mutable (list) default arguments replaced with tuples.

    :param files: result files read by collect_from_result_files()
    :param nlin_pars: result-file keys of the non-linear parameters
    :param nlin_pars_ylabels: y-axis labels matching nlin_pars
    """
    nreses = collect_from_result_files(files, 'residual_norm_weighted')
    nroughs = collect_from_result_files(files, 'roughening_norm')
    num_subplots = 1 + len(nlin_pars)
    # pad the x-range slightly beyond the observed residual norms
    x1 = amin(nreses)
    x2 = amax(nreses)
    dx = x2 - x1
    xlim = (x1 - dx * 0.02, x2 + dx * 0.2)
    xticks = range(int(x1), int(x2), 5)
    plt.subplot(num_subplots, 1, 1)
    plt.loglog(nreses, nroughs, 'o-')
    plt.xlim(xlim)
    plt.gca().set_xticks(xticks)
    plt.gca().get_xaxis().set_major_formatter(
        matplotlib.ticker.ScalarFormatter())
    plt.ylabel('roughening')
    plt.xlabel('Residual Norm')
    plt.grid('on')
    nth = 2
    for par, par_label in zip(nlin_pars, nlin_pars_ylabels):
        y = collect_from_result_files(files, par)
        plt.subplot(num_subplots, 1, nth)
        plt.semilogx(nreses, y, 'o-')
        plt.xlim(xlim)
        plt.gca().set_xticks(xticks)
        plt.gca().get_xaxis().set_major_formatter(
            matplotlib.ticker.ScalarFormatter())
        plt.ylabel(par_label)
        plt.xlabel('Residual Norm')
        plt.grid('on')
        nth += 1
def show(filename=None, labels=False):
    """Finalize the current figure and display it, optionally saving a PNG.

    Relies on enclosing-scope names ``is_3d``, ``legend`` and ``figure``.
    When *filename* has no extension it is treated as a directory and a
    timestamped PNG is written inside it.
    """
    if not labels:
        # fix everything if in 3D mode
        plt.subplots_adjust(left=0.0, right=1.1, bottom=0.0, top=1.0)
        # also hide the axes when in 2D mode
        if not is_3d:
            ax = plt.gca()
            ax.axes.get_xaxis().set_visible(False)
            ax.axes.get_yaxis().set_visible(False)
    if legend:
        plt.legend(loc="upper left", fontsize=8,
                   prop={'family': "Monaco", 'weight': "roman",
                         'size': "x-small"})
    if filename is not None:
        if '.' not in filename:
            if not os.path.isdir(filename):
                os.makedirs(filename)
            filename = os.path.abspath(
                os.path.join(filename, "%s.png" % util.timestamp()))
        figure.savefig(filename, dpi=150,
                       facecolor=figure.get_facecolor(), edgecolor='none')
    plt.show()
def plot(self):
    """Line plot of consumption over time, saved to self.fig_name."""
    figure = plt.figure(figsize=self.figsize)
    figure.patch.set_facecolor('white')
    figure.patch.set_alpha(0)
    axis = plt.gca()
    axis.set_title("Consumption\n", fontsize=self.title_size)
    axis.plot(self.X, self.Y, linewidth=self.line_width, color="black")
    axis.tick_params(axis='both', which='major',
                     labelsize=self.label_value_size)
    axis.set_xlabel("t", fontsize=self.label_font_size)
    axis.set_ylabel("n", fontsize=self.label_font_size)
    # hide the top/right spines, keep ticks on the left/bottom only
    for side in ('right', 'top'):
        axis.spines[side].set_color('none')
    axis.yaxis.set_ticks_position('left')
    axis.xaxis.set_ticks_position('bottom')
    plt.savefig(self.fig_name)
    plt.close()
# Script: compare two displacement fields side by side on maps sharing one
# colorbar, then save to compare.pdf.  Relies on `tp` defined earlier in the
# file and on the project library `vj`.
sites = [ii[0] for ii in tp]
disp0 = np.asarray([ii[1] for ii in tp]).flatten()
# every 3rd component -- presumably the vertical (up) displacement of each
# (e, n, u) triplet; TODO confirm against the producer of `tp`
us0 = disp0[2::3]
ep = vj.EpochalDisplacement('cumu_post_with_seafloor.h5', filter_sites=sites)
disp1 = ep[0]  # epoch 0 of the cumulative postseismic displacement file
us1 = disp1[2::3]
# left panel: first field
plt.subplot(121)
bm = vj.MyBasemap(region_code='near')
mplt = vj.MapPlotDisplacement(basemap=bm)
mplt.plot_scalor(us0, sites, cmap='RdBu')
mplt = vj.MapPlotSlab(basemap=bm)
mplt.plot_top()
plt.clim([-1., 1.])
# right panel: second field (image handle kept for the colorbar)
plt.subplot(122)
bm = vj.MyBasemap(region_code='near')
mplt = vj.MapPlotDisplacement(basemap=bm)
im = mplt.plot_scalor(us1, sites, cmap='RdBu')
mplt = vj.MapPlotSlab(basemap=bm)
mplt.plot_top()
plt.clim([-1., 1.])
# attach the colorbar to an axes carved off the right of the second panel
divider = make_axes_locatable(plt.gca())
cax = divider.append_axes("right", size="5%", pad=0.05)
plt.colorbar(im, cax=cax)
plt.savefig('compare.pdf')
plt.show()
def show_chan_mpl(code, start_date, end_date, stock_days, resample, show_mpl=True,
                  least_init=3, chanK_flag=False, windows=20):
    """Draw a candlestick chart for a stock with Chan-theory ("chanlun")
    annotations: fen (pivot), bi (stroke) and xianduan (segment) overlays,
    plus a volume subplot and a rolling mean.

    Python 2 only (print statements, `<>`, `long`, integer division).

    code        -- stock code string
    start_date / end_date -- date range of K-lines to fetch
    stock_days  -- how many days/minutes of K-lines to look back
    resample    -- bar frequency: 'd' daily, 'w' weekly, 'm' monthly,
                   '30m'/'5m'/'1m' intraday
    show_mpl    -- when True render with matplotlib, otherwise only compute
    least_init  -- seed for the minimum-K-line-count heuristic
    chanK_flag  -- True: plot Chan-theory merged K-lines; False: raw K-lines
    windows     -- rolling-mean window length

    Returns None when no bi indices could be computed; otherwise returns
    implicitly (the function is used for its plotting side effects).
    Relies on module globals: tdd, chan, zoompan, log, plt, mat, np, pd,
    nan, time, datetime.
    """

    def get_least_khl_num(resample, idx=0, init_num=3):
        # Minimum number of K-lines required, per resample frequency.
        # NOTE(review): the chained conditional below binds unusually — the
        # 'm' test applies to the trailing `0`, not the whole tail; confirm
        # the intended precedence before touching it.
        if init_num - idx > 0:
            initw = init_num - idx
        else:
            initw = 0
        return init_num if resample == 'd' else initw if resample == 'w' else init_num-idx-1 if init_num-idx-1 >0 else 0\
            if resample == 'm' else 5

    stock_code = code  # stock code
    # stock_code = '002176'  # stock code
    # start_date = '2017-09-05'
    # start_date = None
    # end_date = '2017-10-12 15:00:00'  # date of the last generated K-line
    # end_date = None
    # stock_days = 60  # how many days/minutes of K-lines to look back
    # resample = 'd'
    # resample = 'w'
    x_jizhun = 3  # x-axis tick spacing (in bars): 5 for daily, 40 for 30-min, 48 for 5-min
    least_khl_num = get_least_khl_num(resample, init_num=least_init)
    # stock_frequency = '5m'  # 1d daily, 30m 30-minute, 5m 5-minute, 1m 1-minute
    stock_frequency = resample  # 1d daily, 30m 30-min, 5m 5-min, 1m 1-min, w: week
    # chanK_flag = chanK  # True: Chan-theory K-lines, False: raw K-lines
    # chanK_flag = True   # True: Chan-theory K-lines, False: raw K-lines
    show_mpl = show_mpl

    def con2Cxianduan(stock, k_data, chanK, frsBiType, biIdx, end_date, cur_ji=1,
                      recursion=False, dl=None, chanK_flag=False, least_init=3):
        # Extend the last (unfinished) bi of this level using xianduan
        # (segments) computed at the next-lower time level, and splice their
        # indices back into this level's bi index list.
        # NOTE(review): the `recursion` guard below makes the recursive call
        # a no-op (it returns biIdx immediately) — confirm that is intended.
        max_k_num = 4
        if cur_ji >= 6 or len(biIdx) == 0 or recursion:
            return biIdx
        idx = biIdx[len(biIdx) - 1]
        k_data_dts = list(k_data.index)
        st_data = chanK['enddate'][idx]
        if st_data not in k_data_dts:
            return biIdx

        # Map sub-level segment endpoints back onto this level's chanK rows.
        def refactorXd(biIdx, xdIdxc, chanK, chanKc, cur_ji):
            new_biIdx = []
            biIdxB = biIdx[len(biIdx) - 1] if len(biIdx) > 0 else 0
            for xdIdxcn in xdIdxc:
                for chanKidx in range(len(chanK.index))[biIdxB:]:
                    if judge_day_bao(chanK, chanKidx, chanKc, xdIdxcn, cur_ji):
                        new_biIdx.append(chanKidx)
                        break
            return new_biIdx

        # Is the sub-level timestamp contained within this chanK bar's span?
        def judge_day_bao(chanK, chanKidx, chanKc, xdIdxcn, cur_ji):
            # daily level (cur_ji == 1): extend the bar end to 15:00 of that day
            _end_date = chanK['enddate'][chanKidx] + datetime.timedelta(
                hours=15) if cur_ji == 1 else chanK['enddate'][chanKidx]
            _start_date = chanK.index[chanKidx] if chanKidx == 0\
                else chanK['enddate'][chanKidx - 1] + datetime.timedelta(minutes=1)
            return _start_date <= chanKc.index[xdIdxcn] <= _end_date

        # cur_ji = 1  # current level
        # requires the K-line count to exceed 4: level 1 daily, 2 30-min, 3 5-min, 4 1-min
        if not recursion:
            # pick the sub-level frequency for the recursion step
            resample = 'd' if cur_ji + 1 == 2 else '5m' if cur_ji + 1 == 3 else \
                'd' if cur_ji + 1 == 5 else 'w' if cur_ji + 1 == 6 else 'd'
            least_khl_num = get_least_khl_num(resample, 1, init_num=least_init)
        print "次级:%s st_data:%s k_data_dts:%s least_khl_num:%s" % (
            len(k_data_dts) - k_data_dts.index(st_data), str(st_data)[:10],
            len(k_data_dts), least_khl_num)
        if cur_ji + 1 != 2 and len(k_data_dts) - k_data_dts.index(
                st_data) >= least_khl_num + 1:
            frequency = '30m' if cur_ji + 1 == 2 else '5m' if cur_ji + 1 == 3 else '1m'
            # else:
            #     frequency = 'd' if cur_ji+1==2 else '5m' if cur_ji+1==3 else \
            #         'd' if cur_ji+1==5 else 'w' if cur_ji+1==6 else 'd'
            start_lastday = str(chanK.index[biIdx[-1]])[0:10]
            print "次级别为:%s cur_ji:%s %s" % (resample, cur_ji, start_lastday)
            # print [chanK.index[x] for x in biIdx]
            # fetch sub-level quotes starting at the last bi's date
            k_data_c, cname = get_quotes_tdx(stock, start=start_lastday,
                                             end=end_date, dl=dl, resample=resample)
            print k_data_c.index[0], k_data_c.index[-1]
            chanKc = chan.parse2ChanK(
                k_data_c, k_data_c.values) if chanK_flag else k_data_c
            fenTypesc, fenIdxc = chan.parse2ChanFen(chanKc, recursion=True)
            if len(fenTypesc) == 0:
                return biIdx
            biIdxc, frsBiTypec = chan.parse2ChanBi(
                fenTypesc, fenIdxc, chanKc, least_khl_num=least_khl_num - 1)
            if len(biIdxc) == 0:
                return biIdx
            print "biIdxc:", [round(k_data_c.high[x], 2) for x in biIdxc
                              ], [str(k_data_c.index[x])[:10] for x in biIdxc]
            xdIdxc, xdTypec = chan.parse2Xianduan(
                biIdxc, chanKc, least_windows=1 if least_khl_num > 0 else 0)
            biIdxc = con2Cxianduan(stock, k_data_c, chanKc, frsBiTypec, biIdxc,
                                   end_date, cur_ji + 1, recursion=True)
            print "xdIdxc:%s xdTypec:%s biIdxc:%s" % (xdIdxc, xdTypec, biIdxc)
            if len(xdIdxc) == 0:
                return biIdx
            # splice sub-level segments in as bi of the upper level
            lastBiType = frsBiType if len(biIdx) % 2 == 0 else -frsBiType
            if len(biIdx) == 0:
                return refactorXd(biIdx, xdIdxc, chanK, chanKc, cur_ji)
            lastbi = biIdx.pop()
            firstbic = xdIdxc.pop(0)
            # same-direction connection
            if lastBiType == xdTypec:
                biIdx = biIdx + refactorXd(biIdx, xdIdxc, chanK, chanKc, cur_ji)
            # opposite-direction connection
            else:
                # print '开始逆向连接'
                # keep the popped bi when it already dominates the first
                # sub-level point; otherwise locate the containing chanK bar
                _mid = [lastbi] if (lastBiType == -1 and chanK['low'][lastbi] <= chanKc['low'][firstbic])\
                    or (lastBiType == 1 and chanK['high'][lastbi] >= chanKc['high'][firstbic]) else\
                    [chanKidx for chanKidx in range(len(chanK.index))[biIdx[len(biIdx) - 1]:]
                     if judge_day_bao(chanK, chanKidx, chanKc, firstbic, cur_ji)]
                biIdx = biIdx + [_mid[0]] + refactorXd(biIdx, xdIdxc, chanK,
                                                       chanKc, cur_ji)
        # print "次级:",len(biIdx),biIdx,[str(k_data_c.index[x])[:10] for x in biIdx]
        return biIdx

    def get_quotes_tdx(code, start=None, end=None, dl=120, resample='d',
                       show_name=True):
        # Fetch quotes via tdx, optionally resample to a coarser period, and
        # return (OHLCV DataFrame, display name).
        # NOTE(review): fetches use the enclosing `stock_code`, not the
        # `code` parameter — confirm that is intentional.
        quotes = tdd.get_tdx_append_now_df_api(
            code=stock_code, start=start, end=end, dl=dl).sort_index(ascending=True)
        if not resample == 'd' and resample in tdd.resample_dtype:
            quotes = tdd.get_tdx_stock_period_to_type(quotes, period_day=resample)
        quotes.index = quotes.index.astype('datetime64')
        if show_name:
            if 'name' in quotes.columns:
                cname = quotes.name[0]
                # cname_g = cname
            else:
                dm = tdd.get_sina_data_df(code)
                if 'name' in dm.columns:
                    cname = dm.name[0]
                else:
                    cname = '-'
        else:
            cname = '-'
        if quotes is not None and len(quotes) > 0:
            quotes = quotes.loc[:, [
                'open', 'close', 'high', 'low', 'vol', 'amount'
            ]]
        else:
            # log.error("quotes is None check:%s"%(code))
            # NOTE(review): format string has two %s but one argument — this
            # raise would itself fail with TypeError if ever reached.
            raise Exception("Code:%s error, df is None%s" % (code))
        return quotes, cname

    quotes, cname = get_quotes_tdx(stock_code, start_date, end_date,
                                   dl=stock_days, resample=resample,
                                   show_name=show_mpl)
    # quotes.rename(columns={'amount': 'money'}, inplace=True)
    # quotes.rename(columns={'vol': 'vol'}, inplace=True)
    # print quotes[-2:]
    # print quotes[:1]
    # Chan-theory K-lines; sample of the expected frame layout:
    #              open  close  high   low    volume    money
    # 2017-05-03  15.69  15.66 15.73 15.53  10557743 165075887
    # 2017-05-04  15.66  15.63 15.70 15.52   8343270 130330396
    # 2017-05-05  15.56  15.65 15.68 15.41  18384031 285966842
    # 2017-05-08  15.62  15.75 15.76 15.54  12598891 197310688
    quotes = chan.parse2ChanK(quotes, quotes.values) if chanK_flag else quotes
    # print quotes[:1].index
    # print quotes[-1:].index

    # drop zero-volume bars (suspended days)
    quotes[quotes['vol'] == 0] = np.nan
    quotes = quotes.dropna()
    Close = quotes['close']
    Open = quotes['open']
    High = quotes['high']
    Low = quotes['low']
    T0 = quotes.index.values
    # T0 = mdates.date2num(T0)
    length = len(Close)
    initial_trend = "down"
    # numeric level id for the current frequency (1 = daily ... 6 = other)
    cur_ji = 1 if stock_frequency == 'd' else \
        2 if stock_frequency == '30m' else \
        3 if stock_frequency == '5m' else \
        4 if stock_frequency == 'w' else \
        5 if stock_frequency == 'm' else 6
    log.debug('======笔形成最后一段未完成段判断是否是次级别的走势形成笔=======:%s %s' %
              (stock_frequency, cur_ji))
    x_date_list = quotes.index.values.tolist()
    # for x_date in x_date_list:
    #     d = datetime.datetime.fromtimestamp(x_date/1000000000)
    #     print d.strftime("%Y-%m-%d %H:%M:%S.%f")
    # print x_date_list
    k_data = quotes
    k_values = k_data.values
    # Chan-theory K-lines
    chanK = quotes if chanK_flag else chan.parse2ChanK(
        k_data, k_values, chan_kdf=chanK_flag)
    fenTypes, fenIdx = chan.parse2ChanFen(chanK)
    # log.debug("code:%s fenTypes:%s fenIdx:%s k_data:%s" % (stock_code,fenTypes, fenIdx, len(k_data)))
    biIdx, frsBiType = chan.parse2ChanBi(fenTypes, fenIdx, chanK,
                                         least_khl_num=least_khl_num)
    # log.debug("biIdx1:%s chanK:%s" % (biIdx, len(chanK)))
    print("biIdx1:%s %s chanK:%s" %
          (biIdx, str(chanK.index.values[biIdx[-1]])[:10], len(chanK)))
    biIdx = con2Cxianduan(stock_code, k_data, chanK, frsBiType, biIdx, end_date,
                          cur_ji, least_init=least_init)
    # log.debug("biIdx2:%s chanK:%s" % (biIdx, len(biIdx)))
    chanKIdx = [(chanK.index[x]) for x in biIdx]
    if len(biIdx) == 0 and len(chanKIdx) == 0:
        print "BiIdx is None and chanKidx is None:%s" % (code)
        return None
    log.debug("con2Cxianduan:%s chanK:%s %s" %
              (biIdx, len(chanK), chanKIdx[-1] if len(chanKIdx) > 0 else None))
    # print quotes['close'].apply(lambda x:round(x,2))
    # print '股票代码', get_security_info(stock_code).display_name
    # print '股票代码', (stock_code), resample, least_khl_num

    # 3. compute screen coordinates for the bi (stroke) points and optionally
    #    label them on the chart
    def plot_fenbi_seq(biIdx, frsBiType, plt=None, color=None):
        # Returns parallel lists of x (bar position) and y (price) for each
        # alternating high/low pivot; draws price labels when plt is passed.
        x_fenbi_seq = []
        y_fenbi_seq = []
        for i in range(len(biIdx)):
            if biIdx[i] is not None:
                fenType = -frsBiType if i % 2 == 0 else frsBiType
                # dt = chanK['enddate'][biIdx[i]]
                # Chan-theory K-lines index directly; raw K-lines via enddate
                dt = chanK.index[biIdx[i]] if chanK_flag else chanK['enddate'][
                    biIdx[i]]
                # print i,k_data['high'][dt], k_data['low'][dt]
                # ns-epoch timestamp shifted by +8h (local timezone offset)
                time_long = long(
                    time.mktime(
                        (dt + datetime.timedelta(hours=8)).timetuple()) * 1000000000)
                # print x_date_list.index(time_long) if time_long in x_date_list else 0
                if fenType == 1:
                    if plt is not None:
                        if color is None:
                            plt.text(x_date_list.index(time_long),
                                     k_data['high'][dt],
                                     str(k_data['high'][dt]),
                                     ha='left',
                                     fontsize=12)
                        else:
                            col_v = color[0] if fenType > 0 else color[1]
                            plt.text(x_date_list.index(time_long),
                                     k_data['high'][dt],
                                     str(k_data['high'][dt]),
                                     ha='left',
                                     fontsize=12,
                                     bbox=dict(facecolor=col_v, alpha=0.5))
                    x_fenbi_seq.append(x_date_list.index(time_long))
                    y_fenbi_seq.append(k_data['high'][dt])
                if fenType == -1:
                    if plt is not None:
                        if color is None:
                            plt.text(x_date_list.index(time_long),
                                     k_data['low'][dt],
                                     str(k_data['low'][dt]),
                                     va='bottom',
                                     fontsize=12)
                        else:
                            col_v = color[0] if fenType > 0 else color[1]
                            plt.text(x_date_list.index(time_long),
                                     k_data['low'][dt],
                                     str(k_data['low'][dt]),
                                     va='bottom',
                                     fontsize=12,
                                     bbox=dict(facecolor=col_v, alpha=0.5))
                    x_fenbi_seq.append(x_date_list.index(time_long))
                    y_fenbi_seq.append(k_data['low'][dt])
                # bottom_time = None
                # for k_line_dto in m_line_dto.member_list[::-1]:
                #     if k_line_dto.low == m_line_dto.low:
                #         # get_price returns dates whose default time is 08:00:00
                #         bottom_time = k_line_dto.begin_time.strftime('%Y-%m-%d') +' 08:00:00'
                #         break
                # x_fenbi_seq.append(x_date_list.index(long(time.mktime(datetime.strptime(bottom_time, "%Y-%m-%d %H:%M:%S").timetuple())*1000000000)))
                # y_fenbi_seq.append(m_line_dto.low)
        return x_fenbi_seq, y_fenbi_seq

    # build the x-axis tick labels (one every x_jizhun bars, capped at 12)
    # print T0[-len(T0):].astype(dt.date)
    T1 = T0[-len(T0):].astype(datetime.date) / 1000000000
    Ti = []
    if len(T0) / x_jizhun > 12:
        x_jizhun = len(T0) / 12
    for i in range(len(T0) / x_jizhun):
        # print "len(T0)/x_jizhun:",len(T0)/x_jizhun
        a = i * x_jizhun
        d = datetime.date.fromtimestamp(T1[a])
        # print d
        T2 = d.strftime('$%Y-%m-%d$')
        Ti.append(T2)
    # print tab
    d1 = datetime.date.fromtimestamp(T1[len(T0) - 1])
    d2 = (d1 + datetime.timedelta(days=1)).strftime('$%Y-%m-%d$')
    Ti.append(d2)
    ll = Low.min() * 0.97
    hh = High.max() * 1.03
    # ht = HoverTool(tooltips=[
    #     ("date", "@date"),
    #     ("open", "@open"),
    #     ("close", "@close"),
    #     ("high", "@high"),
    #     ("low", "@low"),
    #     ("volume", "@volume"),
    #     ("money", "@money"),])
    # TOOLS = [ht, WheelZoomTool(dimensions=['width']),\
    #          ResizeTool(), ResetTool(),\
    #          PanTool(dimensions=['width']), PreviewSaveTool()]

    if show_mpl:
        # --- hand-drawn candlestick chart -------------------------------
        fig = plt.figure(figsize=(10, 6))
        ax1 = plt.subplot2grid((10, 1), (0, 0), rowspan=8, colspan=1)
        # ax1 = fig.add_subplot(2,1,1)
        #fig = plt.figure()
        #ax1 = plt.axes([0,0,3,2])
        X = np.array(range(0, length))
        pad_nan = X + nan
        # compute upper and lower shadows
        max_clop = Close.copy()
        max_clop[Close < Open] = Open[Close < Open]
        min_clop = Close.copy()
        min_clop[Close > Open] = Open[Close > Open]
        # upper shadow
        line_up = np.array([High, max_clop, pad_nan])
        line_up = np.ravel(line_up, 'F')
        # lower shadow
        line_down = np.array([Low, min_clop, pad_nan])
        line_down = np.ravel(line_down, 'F')
        # X coordinates matching the shadow vertices
        pad_nan = nan + X
        pad_X = np.array([X, X, X])
        pad_X = np.ravel(pad_X, 'F')
        # candle bodies: first the bars that closed up
        up_cl = Close.copy()
        up_cl[Close <= Open] = nan
        up_op = Open.copy()
        up_op[Close <= Open] = nan
        down_cl = Close.copy()
        down_cl[Open <= Close] = nan
        down_op = Open.copy()
        down_op[Open <= Close] = nan
        even = Close.copy()
        even[Close != Open] = nan
        # vertex arrays for the up (red) bodies
        pad_box_up = np.array([up_op, up_op, up_cl, up_cl, pad_nan])
        pad_box_up = np.ravel(pad_box_up, 'F')
        pad_box_down = np.array([down_cl, down_cl, down_op, down_op, pad_nan])
        pad_box_down = np.ravel(pad_box_down, 'F')
        pad_box_even = np.array([even, even, even, even, pad_nan])
        pad_box_even = np.ravel(pad_box_even, 'F')
        # the nan entries in X need not pair one-to-one with y
        X_left = X - 0.25
        X_right = X + 0.25
        box_X = np.array([X_left, X_right, X_right, X_left, pad_nan])
        # print box_X
        box_X = np.ravel(box_X, 'F')
        # print box_X
        # Close_handle=plt.plot(pad_X,line_up,color='k')
        vertices_up = np.array([box_X, pad_box_up]).T
        vertices_down = np.array([box_X, pad_box_down]).T
        vertices_even = np.array([box_X, pad_box_even]).T
        handle_box_up = mat.patches.Polygon(vertices_up, color='r', zorder=1)
        handle_box_down = mat.patches.Polygon(vertices_down, color='g', zorder=1)
        handle_box_even = mat.patches.Polygon(vertices_even, color='k', zorder=1)
        ax1.add_patch(handle_box_up)
        ax1.add_patch(handle_box_down)
        ax1.add_patch(handle_box_even)
        handle_line_up = mat.lines.Line2D(pad_X,
                                          line_up,
                                          color='k',
                                          linestyle='solid',
                                          zorder=0)
        handle_line_down = mat.lines.Line2D(pad_X,
                                            line_down,
                                            color='k',
                                            linestyle='solid',
                                            zorder=0)
        ax1.add_line(handle_line_up)
        ax1.add_line(handle_line_down)
        v = [0, length, Open.min() - 0.5, Open.max() + 0.5]
        plt.axis(v)
        ax1.set_xticks(np.linspace(-2, len(Close) + 2, len(Ti)))
        ax1.set_ylim(ll, hh)
        ax1.set_xticklabels(Ti)
        plt.grid(True)
        plt.setp(plt.gca().get_xticklabels(),
                 rotation=30,
                 horizontalalignment='right')
        '''
        The code above was copied from https://www.joinquant.com/post/1756
        (thanks to alpha-smart-dog).  Candlestick drawing ends here.
        '''

    # print "biIdx:%s chankIdx:%s"%(biIdx,str(chanKIdx[-1])[:10])
    if show_mpl:
        x_fenbi_seq, y_fenbi_seq = plot_fenbi_seq(biIdx, frsBiType, plt)
        # plot_fenbi_seq(fenIdx,fenTypes[0], plt,color=['red','green'])
        plot_fenbi_seq(fenIdx, frsBiType, plt, color=['red', 'green'])
    else:
        x_fenbi_seq, y_fenbi_seq = plot_fenbi_seq(biIdx, frsBiType, plt=None)
        plot_fenbi_seq(fenIdx, frsBiType, plt=None, color=['red', 'green'])

    # overlay the blue bi (stroke) polyline on the chart
    inx_value = chanK.high.values
    inx_va = [round(inx_value[x], 2) for x in biIdx]
    log.debug("inx_va:%s count:%s" % (inx_va, len(quotes.high)))
    log.debug("yfenbi:%s count:%s" % ([round(y, 2) for y in y_fenbi_seq], len(chanK)))
    j_BiType = [
        -frsBiType if i % 2 == 0 else frsBiType for i in range(len(biIdx))
    ]
    BiType_s = j_BiType[-1] if len(j_BiType) > 0 else -2
    # bi_price = [str(chanK.low[idx]) if i % 2 == 0 else str(chanK.high[idx]) for i,idx in enumerate(biIdx)]
    # print ("笔 :%s %s"%(biIdx,bi_price))
    # fen_dt = [str(chanK.index[fenIdx[i]])[:10] if chanK_flag else str(chanK['enddate'][fenIdx[i]])[:10]for i in range(len(fenIdx))]
    fen_dt = [(chanK.index[fenIdx[i]]) if chanK_flag else
              (chanK['enddate'][fenIdx[i]]) for i in range(len(fenIdx))]
    # split pivot dates into alternating low/high sequences
    if len(fenTypes) > 0:
        if fenTypes[0] == -1:
            # fen_price = [str(k_data.low[idx]) if i % 2 == 0 else str(k_data.high[idx]) for i,idx in enumerate(fen_dt)]
            low_fen = [idx for i, idx in enumerate(fen_dt) if i % 2 == 0]
            high_fen = [idx for i, idx in enumerate(fen_dt) if i % 2 <> 0]
        else:
            # fen_price = [str(k_data.high[idx]) if i % 2 == 0 else str(k_data.low[idx]) for i,idx in enumerate(fen_dt)]
            high_fen = [idx for i, idx in enumerate(fen_dt) if i % 2 == 0]
            low_fen = [idx for i, idx in enumerate(fen_dt) if i % 2 <> 0]
        # fen_duration =[fenIdx[i] - fenIdx[i -1 ] if i >0 else 0 for i,idx in enumerate(fenIdx)]
    else:
        # fen_price = fenTypes
        # fen_duration = fenTypes
        low_fen = []
        high_fen = []
    # fen_dt = [str(k_data.index[idx])[:10] for i,idx in enumerate(fenIdx)]
    # print low_fen,high_fen

    def dataframe_mode_round(df):
        # Mode of the series rounded to 1 decimal; falls back to 0 decimals
        # when the finer rounding produces no mode.
        roundlist = [1, 0]
        df_mode = []
        # df.high.cummin().value_counts()
        for i in roundlist:
            df_mode = df.apply(lambda x: round(x, i)).mode()
            if len(df_mode) > 0:
                break
        return df_mode

    kdl = k_data.loc[low_fen].low
    kdl_mode = dataframe_mode_round(kdl)
    kdh = k_data.loc[high_fen].high
    kdh_mode = dataframe_mode_round(kdh)
    print("kdl:%s" % (kdl.values))
    print("kdh:%s" % (kdh.values))
    print("kdl_mode:%s kdh_mode%s chanKidx:%s" %
          (kdl_mode.values, kdh_mode.values, str(chanKIdx[-1])[:10]))
    # find the first bar after the last pivot whose close/extreme clears the
    # modal support/resistance level, depending on the last bi direction
    lastdf = k_data[k_data.index >= chanKIdx[-1]]
    if BiType_s == -1:
        keydf = lastdf[((lastdf.close >= kdl_mode.max()) &
                        (lastdf.low >= kdl_mode.max()))]
    elif BiType_s == 1:
        keydf = lastdf[((lastdf.close >= kdh_mode.max()) &
                        (lastdf.high >= kdh_mode.min()))]
    else:
        keydf = lastdf[((lastdf.close >= kdh_mode.max()) &
                        (lastdf.high >= kdh_mode.min())) |
                       ((lastdf.close <= kdl_mode.min()) &
                        (lastdf.low <= kdl_mode.min()))]
    print("BiType_s:%s keydf:%s key:%s" %
          (BiType_s, None if len(keydf) == 0 else str(
              keydf.index.values[0])[:10], len(keydf)))
    # return BiType_s,None if len(keydf) == 0 else str(keydf.index.values[0])[:10],len(keydf)
    # import ipdb;ipdb.set_trace()
    log.debug("Fentype:%s " % (fenTypes))
    log.debug("fenIdx:%s " % (fenIdx))
    # print ("fen_duration:%s "%(fen_duration))
    # print ("fen_price:%s "%(fen_price))
    # print ("fendt:%s "%(fen_dt))
    print("BiType :%s frsBiType:%s" % (j_BiType, frsBiType))
    # chart-coordinate bi prices and durations (in bars) for logging
    if len(j_BiType) > 0:
        if j_BiType[0] == -1:
            tb_price = [
                str(quotes.low[idx]) if i % 2 == 0 else str(quotes.high[idx])
                for i, idx in enumerate(x_fenbi_seq)
            ]
        else:
            tb_price = [
                str(quotes.high[idx]) if i % 2 == 0 else str(quotes.low[idx])
                for i, idx in enumerate(x_fenbi_seq)
            ]
        tb_duration = [
            x_fenbi_seq[i] - x_fenbi_seq[i - 1] if i > 0 else 0
            for i, idx in enumerate(x_fenbi_seq)
        ]
    else:
        tb_price = j_BiType
        tb_duration = j_BiType
    print "图笔 :", x_fenbi_seq, tb_price
    print "图笔dura :", tb_duration

    # draw the xianduan (segments) on top of the bi polyline
    xdIdxs, xfenTypes = chan.parse2ChanXD(frsBiType, biIdx, chanK)
    print '线段', xdIdxs, xfenTypes
    x_xd_seq = []
    y_xd_seq = []
    for i in range(len(xdIdxs)):
        if xdIdxs[i] is not None:
            fenType = xfenTypes[i]
            # dt = chanK['enddate'][biIdx[i]]
            # Chan-theory K-lines index directly; raw K-lines via enddate
            dt = chanK.index[xdIdxs[i]] if chanK_flag else chanK['enddate'][
                xdIdxs[i]]
            # print k_data['high'][dt], k_data['low'][dt]
            time_long = long(
                time.mktime((dt + datetime.timedelta(hours=8)).timetuple()) *
                1000000000)
            # print x_date_list.index(time_long) if time_long in x_date_list else 0
            if fenType == 1:
                x_xd_seq.append(x_date_list.index(time_long))
                y_xd_seq.append(k_data['high'][dt])
            if fenType == -1:
                x_xd_seq.append(x_date_list.index(time_long))
                y_xd_seq.append(k_data['low'][dt])
            # bottom_time = None
            # for k_line_dto in m_line_dto.member_list[::-1]:
            #     if k_line_dto.low == m_line_dto.low:
            #         # get_price returns dates whose default time is 08:00:00
            #         bottom_time = k_line_dto.begin_time.strftime('%Y-%m-%d') +' 08:00:00'
            #         break
            # x_fenbi_seq.append(x_date_list.index(long(time.mktime(datetime.strptime(bottom_time, "%Y-%m-%d %H:%M:%S").timetuple())*1000000000)))
            # y_fenbi_seq.append(m_line_dto.low)

    # overlay the blue bi polyline on the chart
    print("线段 :%s" % (x_xd_seq))
    print("笔值 :%s" % ([str(x) for x in (y_xd_seq)]))
    # Y_hat = X * b + a
    if show_mpl:
        plt.plot(x_fenbi_seq, y_fenbi_seq)
        plt.legend([stock_code, cname], loc=0)
        plt.title(stock_code + " | " + cname + " | " +
                  str(quotes.index[-1])[:10], fontsize=14)
        plt.plot(x_xd_seq, y_xd_seq)
        if len(quotes) > windows:
            # NOTE(review): pd.rolling_mean was removed in pandas 0.18+ —
            # modern pandas needs quotes.close.rolling(windows).mean()
            roll_mean = pd.rolling_mean(quotes.close, window=windows)
            plt.plot(roll_mean, 'r')
        # interactive zoom/pan helpers on the price axes
        zp = zoompan.ZoomPan()
        figZoom = zp.zoom_factory(ax1, base_scale=1.1)
        figPan = zp.pan_factory(ax1)
        '''#subplot2 bar
        ax2 = plt.subplot2grid((10, 1), (8, 0), rowspan=2, colspan=1)
        # ax2.plot(quotes.vol)
        # ax2.set_xticks(np.linspace(-2, len(quotes) + 2, len(Ti)))
        ll = min(quotes.vol.values.tolist()) * 0.97
        hh = max(quotes.vol.values.tolist()) * 1.03
        ax2.set_ylim(ll, hh)
        # ax2.set_xticklabels(Ti)
        # plt.hist(quotes.vol, histtype='bar', rwidth=0.8)
        plt.bar(x_date_list,quotes.vol, label="Volume", color='b')
        '''
        # draw volume, without tight_layout()
        '''
        pad = 0.25
        yl = ax1.get_ylim()
        ax1.set_ylim(yl[0]-(yl[1]-yl[0])*pad,yl[1])
        ax2 = ax1.twinx()
        ax2.set_position(mat.transforms.Bbox([[0.125,0.1],[0.9,0.32]]))
        volume = np.asarray(quotes.amount)
        pos = quotes['open']-quotes['close']<0
        neg = quotes['open']-quotes['close']>=0
        idx = quotes.reset_index().index
        ax2.bar(idx[pos],volume[pos],color='red',width=1,align='center')
        ax2.bar(idx[neg],volume[neg],color='green',width=1,align='center')
        yticks = ax2.get_yticks()
        ax2.set_yticks(yticks[::3])
        '''
        # same sharex: volume subplot sharing the price x-axis
        plt.subplots_adjust(left=0.05,
                            bottom=0.08,
                            right=0.95,
                            top=0.95,
                            wspace=0.15,
                            hspace=0.00)
        plt.setp(ax1.get_xticklabels(), visible=False)
        yl = ax1.get_ylim()
        # ax2 = plt.subplot(212, sharex=ax1)
        ax2 = plt.subplot2grid((10, 1), (8, 0), rowspan=2, colspan=1, sharex=ax1)
        # ax2.set_position(mat.transforms.Bbox([[0.125,0.1],[0.9,0.32]]))
        volume = np.asarray(quotes.amount)
        pos = quotes['open'] - quotes['close'] < 0
        neg = quotes['open'] - quotes['close'] >= 0
        idx = quotes.reset_index().index
        ax2.bar(idx[pos], volume[pos], color='red', width=1, align='center')
        ax2.bar(idx[neg], volume[neg], color='green', width=1, align='center')
        yticks = ax2.get_yticks()
        ax2.set_yticks(yticks[::3])
        # plt.tight_layout()
        # plt.subplots_adjust(hspace=0.00, bottom=0.08)
        plt.xticks(rotation=15, horizontalalignment='center')
        # plt.bar(x_date_list,quotes.vol, label="Volume", color='b')
        # quotes['vol'].plot(kind='bar', ax=ax2, color='g', alpha=0.1)
        # ax2.set_ylim([0, ax2.get_ylim()[1] * 2])
        # plt.gcf().subplots_adjust(bottom=0.15)
        # fig.subplots_adjust(left=0.05, bottom=0.08, right=0.95, top=0.95, wspace=0.15, hspace=0.25)
        #scale the x-axis tight
        # ax2.set_xlim(min(x_date_list),max(x_date_list))
        # the y-ticks for the bar were too dense, keep only every third one
        # plt.grid(True)
        # plt.xticks(rotation=30, horizontalalignment='center')
        # plt.setp( axs[1].xaxis.get_majorticklabels(), rotation=70 )
        # plt.legend()
        # plt.tight_layout()
        # plt.draw()
        # plt.show()
        plt.show(block=False)
# Plot the decomposed post-seismic displacement prediction for one site and
# component, then save it as PNG and PDF.
from pylab import plt
import viscojapan as vj
from viscojapan.tsana.post_fit.post import fit_post

site = 'J550'
cmpt = 'e'  # component; presumably 'e' = east — TODO confirm
ylim = (-0.1, 1)

pplt = vj.inv.PredictedTimeSeriesPlotter(
    partition_file = 'deformation_partition.h5',
    result_file = 'nrough_06_naslip_11.h5'
    )
pplt.plot_post_disp_decomposition(site, cmpt, marker_for_obs='.')
plt.ylim(ylim)

def ajust_xaxis_tick_labels(ax):
    # Shrink and rotate the x-axis tick labels so the dates stay readable.
    for tick in ax.xaxis.get_major_ticks():
        tick.label.set_fontsize(8)
        # specify integer or one of preset strings, e.g.
        #tick.label.set_fontsize('x-small')
        tick.label.set_rotation('vertical')

ajust_xaxis_tick_labels(plt.gca())

plt.savefig('model_prediction_%s-%s.png'%(site, cmpt))
plt.savefig('model_prediction_%s-%s.pdf'%(site, cmpt))
plt.show()
# Side-by-side map comparison of two vertical-displacement fields (variant of
# the earlier compare script).  `tp`, `sites`, `vj`, `np`, `plt` and
# `make_axes_locatable` are defined earlier in the script.
disp0 = np.asarray([ii[1] for ii in tp]).flatten()
us0 = disp0[2::3]  # presumably the "up" of (e, n, u) triplets — TODO confirm

ep = vj.EpochalDisplacement('cumu_post_with_seafloor.h5', filter_sites=sites)
disp1 = ep[0]  # epoch 0
us1 = disp1[2::3]

# left panel: first field with the slab top overlaid
plt.subplot(121)
bm = vj.MyBasemap(region_code='near')
mplt = vj.MapPlotDisplacement(basemap=bm)
mplt.plot_scalor(us0, sites, cmap='RdBu')
mplt = vj.MapPlotSlab(basemap=bm)
mplt.plot_top()
plt.clim([-1., 1.])

# right panel: second field; keep the image handle for the colorbar
plt.subplot(122)
bm = vj.MyBasemap(region_code='near')
mplt = vj.MapPlotDisplacement(basemap=bm)
im = mplt.plot_scalor(us1, sites, cmap='RdBu')
mplt = vj.MapPlotSlab(basemap=bm)
mplt.plot_top()
plt.clim([-1., 1.])

# shared colorbar appended to the right panel
divider = make_axes_locatable(plt.gca())
cax = divider.append_axes("right", size="5%", pad=0.05)
plt.colorbar(im, cax=cax)

plt.savefig('compare.pdf')
plt.show()
#plt.plot(100,101, '.', color=Sps, label='SpIES') #plt.plot(100,101, '.', color=sts, label='Stars') #plt.plot(100,101, '.', color=lzs, label='z<2.2 QSOs') plt.plot(100,101, '.', color=sc[0], label=r'3.5$\textless$ z $\textless$ 5 QSOs') #Plot Assef 2013 BOX WISE,=plt.plot([0.119,4],[18.402,18.402], color='k',linestyle='--',linewidth=1, dashes = (10,10)) plt.plot([0.119,0.119],[0,18.402],color='k',linestyle='--',linewidth=1, dashes = (10,10)) #Plot the W1 5 sigma line in this color space W1,=plt.plot(col,ch2prime, color='k',linestyle='-.',linewidth=1, dashes = [8,4,2,4]) #SpIES 5sigma line (CH2) plt.axhline(22.0, linestyle='--',linewidth=1, color='b', dashes = (10,10),label=r'SpIES 5$\sigma$') plt.xlabel(r'[3.6]$-$[4.5] Color') plt.ylabel('[4.5]') first_legend = plt.legend([WISE,W1],['Assef et al. 2013 limits',r'WISE W1 5$\sigma$'],loc=1) ax = plt.gca().add_artist(first_legend) plt.legend(loc=2,markerscale=2, scatterpoints=1) fig.set_size_inches(10.0,10.0) ax2.minorticks_on() ax2.yaxis.set_major_locator(majorLocator) ax2.yaxis.set_major_formatter(majorFormatter) ax2.yaxis.set_minor_locator(minorLocator) #ax2.yaxis.set_minor_formatter(majorFormatter) label = ax2.get_yticks() plt.yticks(label,rotation=90) plt.xlim(-4,4) plt.ylim(13,23)
def _timeseries_postplot(self):
    """Register the current axes with the sync group and wire up the
    click / key-release callbacks on its figure canvas."""
    axes = plt.gca()
    self.cur_sync.add_axis(axes)
    canvas = axes.get_figure().canvas
    for event_name, handler in (
            ('button_press_event', self._timeseries_click_cb),
            ('key_release_event', self._timeseries_key_release_cb)):
        canvas.mpl_connect(event_name, handler)
# NOTE(review): fragment — `fid`, `visMs`, `Hes`, `rakes`, `nroughs`, `nreses`
# and `plt` are defined before this excerpt; the lines below appear to be the
# tail of a per-file accumulation loop followed by summary plotting.
m = fid['m'][...]
visMs.append(m[-3])   # presumably log10 viscosity — TODO confirm against model layout
Hes.append(m[-2])     # presumably elastic thickness He — TODO confirm
rakes.append(m[-1])   # presumably rake angle — TODO confirm
nrough = fid['regularization/roughening/norm'][...]
nroughs.append(nrough)

xlim = (7, 22)
xlim = None  # overrides the tuple above; plots use automatic x-limits
xticks = range(7,22)

# stacked semilog panels of each recovered parameter vs. residual norm
plt.subplot(411)
plt.semilogx(nreses, visMs,'o')
plt.xlim(xlim)
plt.gca().set_xticks(xticks)
plt.grid('on')
plt.ylabel('log10(visM/(Pa.s))')

plt.subplot(412)
plt.semilogx(nreses, Hes,'o')
plt.xlim(xlim)
plt.gca().set_xticks(xticks)
plt.grid('on')
plt.ylabel('He/km')

plt.subplot(413)
plt.semilogx(nreses, rakes,'o')
plt.xlim(xlim)
plt.gca().set_xticks(xticks)
plt.ylabel('rake')
def detect_signal(file, window, samplerate, df, every, v, xf, f_center):
    """Look for known satellite downlink frequencies inside a spectrogram.

    file       -- base name used for the output PNG files
    window     -- FFT window size (unused here; kept for interface parity)
    samplerate -- receiver sample rate in Hz; frequencies within
                  f_center +/- samplerate are considered in view
    df         -- frequency resolution (Hz per FFT bin)
    every      -- passed through to signal_distance()
    v          -- 2-D spectrogram (rows = time slices, cols = frequency bins)
    xf         -- frequency axis; only its length (bin count) is used
    f_center   -- tuner center frequency in Hz

    Returns (foundknownsignal, foundknownsignal1): per candidate frequency,
    the signal_distance() score and the integer sum of the cropped band.
    Relies on module globals: time_start, signal_distance, np, plt.
    """
    ##### detect
    # loading in sdr data
    # loading in the database what frequencies are known
    db_satname = ["NOAA19", "NOAA15", "NOAA18", "ISS", "ISS APRS"]
    db_f = [137100000.0, 137620000.0, 137912500.0, 145800000.0, 145825000.0]
    db_f_band = [24000.0, 24000.0, 24000.0, 6000.0, 6000.0]
    # keep only the catalogue entries that fall inside the receiver span
    f = []
    f_band = []
    for kkk in range(len(db_f)):
        if db_f[kkk] >= f_center - samplerate and db_f[kkk] <= f_center + samplerate:
            f.append(db_f[kkk])
            f_band.append(db_f_band[kkk])
    print(f)
    # convert each frequency to a center bin index and half-width in bins
    start = []
    bandrange = []
    for kkkk in range(len(f)):
        start.append(int((f[kkkk] - f_center) / df + len(xf)/2.0))
        #print(start[-1], (0), df, len(xf)/2.0)
        bandrange.append(int(f_band[kkkk]/df))
        #print("band", bandrange[-1])
        #print("test", start[-1], df, bandrange[-1])
    print(time.time(), timeit.default_timer()-time_start, "graphing start")
    foundknownsignal = []
    foundknownsignal1 = []
    for j in range(len(f)):
        # w1: the exact expected band; w2: the band plus 60 kHz of margin
        w1 = []
        w2 = []
        for ll in range(len(v)):
            w1.append(v[ll][start[j] - bandrange[j]: start[j] + bandrange[j]])
            w2.append(v[ll][start[j] - (bandrange[j]+int(60000/df)): start[j] + (bandrange[j]+int(60000/df))])
        foundknownsignal.append(signal_distance(w1, every))
        foundknownsignal1.append(int(np.sum(w1)))
        # score values are embedded in the output file names
        filename = file+"_"+str(f[j])+"_sig"+str(foundknownsignal[j])+"_"+str(foundknownsignal1[j])+".png"
        filename1 = file+"_"+str(f[j])+"x_sig"+str(foundknownsignal[j])+"_"+str(foundknownsignal1[j])+".png"
        plt.imshow(w1, interpolation='nearest')
        plt.gca().invert_yaxis()
        plt.savefig(filename, format='png')
        #plt.show()
        plt.imshow(w2, interpolation='nearest')
        #plt.imshow(result)
        plt.gca().invert_yaxis()
        plt.savefig(filename1, format='png', dpi=100)
        #plt.show()
    print(time.time(), timeit.default_timer()-time_start, "graphing end")
    '''
    #signal_strength = signal_strength / np.max(signal_strength)
    #signal_strength1 = signal_strength1 / np.max(signal_strength1)
    print(len(signal_strength))
    print(signal_strength)
    #plt.plot(signal_strength)
    plt.plot(xf, frr)
    plt.show()
    '''
    return foundknownsignal, foundknownsignal1
# Fragment: plot all benchmark runs (error vs. normalized evaluation budget)
# for one test case and save the figure.  `x`, `y`, `neval`, `nc`, `eat`,
# `df`, `loaded`, `plotloc`, `mean`, `std`, `give_datestring` and `join`
# are defined before this excerpt.
mx='{:e}'.format(np.max(y))
mx=10*10**int(mx[-3:])  # round the maximum up to the next power-of-ten bound
# NOTE(review): the format call passes `nc` twice and `eat` once for the three
# placeholders — the second "{}" prints the case number, not a function id;
# confirm that is intended.
ttxt='case {} and test function {}: benchmarking {}'.format(nc,nc,eat)
ttxt+='\nbest error: mean {} and std {} after {} evaluations'.format(mean(y[:,-1]),std(y[:,-1]),mean(neval[:,-1]))
date=give_datestring()
plt.figure()
# grey rectangles spanning the error range of all runs at each checkpoint;
# the first checkpoint gets a narrower box
for i,xval in enumerate(x[0]):
    if i==0:
        rxmin, rxmax = xval-0.004, xval+0.004
    else:
        rxmin, rxmax = xval-0.03, xval+0.03
    rymin, rymax = np.min(y[:,i]), np.max(y[:,i])
    rect = plt.Rectangle((rxmin, rymin), rxmax-rxmin, rymax-rymin, facecolor='grey',alpha=0.4)
    plt.gca().add_patch(rect)
# one error curve per loaded run
for i,sc in enumerate(loaded):
    plt.plot(x[i],y[i])
if nc != 8:
    plt.semilogy()
#if nc in yldict: plt.ylim(yldict[nc])
plt.xlabel('FES / maxFES')
plt.ylabel(r'error = $f_i(x)-f_i(x^*)$')
# two suptitles: the description top-center, the datestamp bottom-right
plt.suptitle(ttxt,x=0.5,y=0.98, ha='center',va='top', fontsize=10)
plt.suptitle(date,x=0.97,y=0.02, ha='right',va='bottom', fontsize=8)
plt.savefig(join(plotloc,'allruns_c'+str(nc).zfill(3)+'_'+eat+'_'+df[:-1]+'.png'))
plt.close()
# for i,xval in enumerate(x[0]):
#     if i==0:
#         rxmin, rxmax = xval-0.004, xval+0.004
#     else:
# NOTE(review): fragment of a larger `if dim == 1: ... elif dim == 2: ...`
# chain — the opening `if` and the definitions of v, posT, posH, N, T,
# my_dict and dim lie outside this excerpt.  Indentation below reconstructs
# only the visible branches.
    plt.legend()
    plt.subplot(5,1,4)
    plt.plot(np.arange(v.size),v[:,0],'.b',label='GPU velocities')
    plt.legend()
    plt.subplot(5,1,5)
    # one trajectory per 32nd particle; posH is laid out with stride N
    for i in range(0,N,32):
        # print posH[i::N].shape
        plt.plot(np.ones(T+1)*i,posH[i::N],'r-x')
        plt.plot(np.ones(1)*i,posH[i,0],'bo')
    plt.legend()
    plt.show()
elif dim == 2:
    # 2-D case: initial points, GPU result, CPU vs GPU overlay, and sampled
    # trajectories; the y-axis is inverted to match image coordinates.
    fig=plt.figure()
    plt.subplot(4,1,1)
    plt.plot(my_dict['pts_at_0'][:,0],my_dict['pts_at_0'][:,1],'.r')
    plt.gca().invert_yaxis()
    plt.subplot(4,1,2)
    plt.plot(posT[:,0],posT[:,1],'.b',label='GPU')
    plt.gca().invert_yaxis()
    plt.legend()
    plt.subplot(4,1,3)
    plt.plot(my_dict['CPU_results'][0,:],my_dict['CPU_results'][1,:],'.r',label='CPU')
    plt.plot(posT[:,0],posT[:,1],'.b',label='GPU')
    plt.gca().invert_yaxis()
    plt.legend()
    plt.subplot(4,1,4)
    for i in range(0,N,32):
        plt.plot(posH[i::N,0],posH[i::N,1],'r')
        # plt.plot(posH[i::N,0],posH[i::N,1],'r')
        # plt.plot(posH[i,0],posH[i,1],'bo')
    plt.gca().invert_yaxis()
def invert_y_axis_if_needed():
    """Make the current axes' y-axis increase downward, inverting it only
    when it is currently ascending (idempotent)."""
    axes = plt.gca()
    low, high = axes.get_ylim()
    if high > low:
        axes.set_ylim(high, low)
def plot_variable(u, name, direc, cmap='gist_yarg', scale='lin', numLvls=12,
                  umin=None, umax=None, tp=False, tpAlpha=0.5, show=True,
                  hide_ax_tick_labels=False, label_axes=True, title='',
                  use_colorbar=True, hide_axis=False, colorbar_loc='right'):
    """Save (and optionally show) a filled tricontour plot of a FEniCS
    function over its mesh.

    u       -- FEniCS function; vertex values are plotted over mesh cells
    name    -- output file name (without extension); '<direc><name>.pdf'
    direc   -- output path prefix; its directory part is created if missing
    cmap    -- matplotlib colormap name
    scale   -- 'lin', 'log', or 'bool' (discrete 0/1 levels, no colorbar)
    numLvls -- number of contour levels for 'lin'/'log'
    umin/umax -- clamp limits; default to the data range
    tp/tpAlpha -- overlay the triangulation with the given alpha
    show    -- call plt.show() after saving
    Raises ValueError for an unknown `scale` (previously a NameError surfaced
    later at the tricontourf call).
    Relies on module globals: plt, pl, np, colors, make_axes_locatable, os.
    """
    mesh = u.function_space().mesh()
    v = u.compute_vertex_values(mesh)
    x = mesh.coordinates()[:, 0]
    y = mesh.coordinates()[:, 1]
    t = mesh.cells()

    # ensure the output directory exists (was duplicated before and after
    # plotting in the original; once up front is sufficient)
    d = os.path.dirname(direc)
    if d and not os.path.exists(d):
        os.makedirs(d)

    # `is not None` instead of `!= None`: identity test, and robust against
    # objects overriding __eq__ (e.g. numpy arrays)
    vmin = umin if umin is not None else v.min()
    vmax = umax if umax is not None else v.max()

    # contour levels :
    if scale == 'log':
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import LogFormatter
        levels = np.logspace(np.log10(vmin), np.log10(vmax), numLvls)
        formatter = LogFormatter(10, labelOnlyBase=False)
        norm = colors.LogNorm()
    elif scale == 'lin':
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import ScalarFormatter
        levels = np.linspace(vmin, vmax, numLvls)
        formatter = ScalarFormatter()
        norm = None
    elif scale == 'bool':
        from matplotlib.ticker import ScalarFormatter
        levels = [0, 1, 2]
        formatter = ScalarFormatter()
        norm = None
    else:
        raise ValueError("unknown scale '%s' (expected 'lin', 'log' or 'bool')"
                         % scale)

    fig = plt.figure(figsize=(8, 7))
    ax = fig.add_subplot(111)
    c = ax.tricontourf(x, y, t, v, levels=levels, norm=norm,
                       cmap=pl.get_cmap(cmap))
    plt.axis('equal')

    if tp:  # was `tp == True`
        ax.triplot(x, y, t, 'k-', lw=0.25, alpha=tpAlpha)
    ax.set_xlim([x.min(), x.max()])
    ax.set_ylim([y.min(), y.max()])
    if label_axes:
        ax.set_xlabel(r'$x$')
        ax.set_ylabel(r'$y$')
    if hide_ax_tick_labels:
        ax.set_xticklabels([])
        ax.set_yticklabels([])
    if hide_axis:
        plt.axis('off')

    # include colorbar :
    if scale != 'bool' and use_colorbar:
        divider = make_axes_locatable(plt.gca())
        cax = divider.append_axes(colorbar_loc, "5%", pad="3%")
        plt.colorbar(c, cax=cax, format=formatter, ticks=levels)

    pl.mpl.rcParams['axes.titlesize'] = 'small'
    plt.title(title)
    plt.tight_layout()
    plt.savefig(direc + name + '.pdf')
    if show:
        plt.show()
    plt.close(fig)
def signal_substraction(file, window, samplerate, df, every, result_of_fft):
    """Subtract the long-time per-frequency average from a spectrogram and
    run band-wise edge detection over the residual.

    file          -- base name for the diagnostic PNG written at the end
    window        -- number of frequency bins per time slice
    samplerate/df/every -- kept for interface parity; unused in the body
    result_of_fft -- 2-D spectrogram (rows = time slices, cols = bins)

    Returns v: a (time, window) array holding the edge-detected signal.
    Relies on module globals: meaning, substract, signal, edge_detection1,
    time_start, np, plt.
    """
    # we assume that our satellite signals are not continuously received,
    # so we do a subtraction on frequency level: the long-time average
    # intensity of each frequency is subtracted from the window kernel.
    # this makes fluctuations easier to see, because steady carriers are
    # absorbed into the average, so a superposed signal stands out.
    threshold = meaning(result_of_fft)
    signal_lowered = substract(result_of_fft, threshold)
    print(time.time(), timeit.default_timer()-time_start, "lowering done")
    v = np.zeros((len(signal_lowered), window))
    #w = np.zeros((len(signal_lowered), window))
    # process the spectrum in window/512 wide bands
    bandwidth = int(window / (2*2*2*2*2*2*2*2*2))
    #print("band", bandwidth*df)
    for j in range(0, window, bandwidth):
        #print(j)
        # u: the current band cut out of every time slice
        u = []
        for kk in range(len(signal_lowered)):
            u.append(signal_lowered[kk][j:j+bandwidth])
        #u = u/np.max(u)
        #print("test", len(u), len(u[0]))
        '''
        plt.imshow(u, interpolation='nearest')
        #plt.imshow(result)
        plt.gca().invert_yaxis()
        #plt.gca().invert_xaxis()
        #
        #plt.gca().set_xticks(xf)
        plt.show()
        '''
        edged = signal(edge_detection1(u))
        #print(np.max(edged))
        #graved = centerofgravity(edged)
        #signal_strength.append(np.sum(edged))
        # write the edge-detected band back into its columns of v
        for k in range(len(edged)):
            for l in range(len(edged[k])):
                v[k][j+l] = edged[k][l]# + signaaaal
    print(time.time(), timeit.default_timer()-time_start, "edging done")
    # dump the full average-subtracted spectrogram for visual inspection
    w3 = []
    filename3 = file+"_all.png"
    for lll in range(len(v)):
        w3.append(signal_lowered[lll])
    plt.imshow(w3, interpolation='nearest')
    #plt.imshow(result)
    plt.gca().invert_yaxis()
    #plt.figure(figsize=(1200, 800))
    plt.savefig(filename3, format='png', dpi=500)
    #plt.savefig(filename, format='png', dpi=1000)
    #plt.show()
    del w3
    return v
def plot_variable(u, name, direc, cmap=cmaps.parula, scale='lin', numLvls=100,
                  umin=None, umax=None, tp=False, tpAlpha=1.0, show=False,
                  hide_ax_tick_labels=False, label_axes=True, title='',
                  use_colorbar=True, hide_axis=False, colorbar_loc='right'):
    """
    Render ``u`` as a filled tricontour plot over its mesh and save the
    figure to ``os.path.join(direc, name) + '.eps'``.

    Parameters
    ----------
    u : object
        Function-like object exposing ``function_space()`` and
        ``compute_vertex_values()`` (presumably a FEniCS Function —
        TODO confirm against callers).
    name : str
        Basename of the saved figure (``.eps`` is appended).
    direc : str
        Output directory; created if missing.
    cmap : colormap
        Matplotlib colormap or name; defaults to the project's
        ``cmaps.parula``. Other useful names: gist_yarg (grey), gnuplot,
        hsv, gist_ncar, jet.
    scale : str
        Contour-level spacing: ``'log'``, ``'lin'`` or ``'bool'``.
    numLvls : int
        Number of contour levels; the colorbar shows at most 8 ticks.
    umin, umax : float or None
        Explicit color limits; the data min/max are used when None.
    tp : bool
        Overlay the mesh triangulation when True.
    tpAlpha : float
        Alpha of the triangulation overlay.
    show : bool
        Call ``plt.show()`` after saving.
    hide_ax_tick_labels, label_axes, title, use_colorbar, hide_axis,
    colorbar_loc :
        Cosmetic options forwarded to matplotlib.
    """
    mesh = u.function_space().mesh()
    v = u.compute_vertex_values(mesh)
    x = mesh.coordinates()[:,0]
    y = mesh.coordinates()[:,1]
    t = mesh.cells()

    if not os.path.isdir( direc ):
        os.makedirs(direc)
    full_path = os.path.join(direc, name)

    # FIX: use identity comparison with None (was `umin != None`).
    vmin = umin if umin is not None else v.min()
    vmax = umax if umax is not None else v.max()

    # contour levels :
    if scale == 'log':
        # Clamp data strictly inside (vmin, vmax) so LogNorm stays valid.
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import LogFormatter
        levels = np.logspace(np.log10(vmin), np.log10(vmax), numLvls)
        # Cap the colorbar at 8 tick labels regardless of numLvls.
        tick_numLvls = min( numLvls, 8 )
        tick_levels = np.logspace(np.log10(vmin), np.log10(vmax), tick_numLvls)
        formatter = LogFormatter(10, labelOnlyBase=False)
        norm = colors.LogNorm()
    elif scale == 'lin':
        v[v < vmin] = vmin + 1e-12
        v[v > vmax] = vmax - 1e-12
        from matplotlib.ticker import ScalarFormatter
        levels = np.linspace(vmin, vmax, numLvls)
        tick_numLvls = min( numLvls, 8 )
        tick_levels = np.linspace(vmin, vmax, tick_numLvls)
        formatter = ScalarFormatter()
        norm = None
    elif scale == 'bool':
        from matplotlib.ticker import ScalarFormatter
        levels = [0, 1, 2]
        formatter = ScalarFormatter()
        norm = None

    fig = plt.figure(figsize=(5,5))
    ax = fig.add_subplot(111)
    c = ax.tricontourf(x, y, t, v, levels=levels, norm=norm,
                       cmap=plt.get_cmap(cmap))
    plt.axis('equal')
    if tp:  # FIX: was `if tp == True`
        ax.triplot(x, y, t, '-', lw=0.2, alpha=tpAlpha)
    ax.set_xlim([x.min(), x.max()])
    ax.set_ylim([y.min(), y.max()])
    if label_axes:
        ax.set_xlabel(r'$x$')
        ax.set_ylabel(r'$y$')
    if hide_ax_tick_labels:
        ax.set_xticklabels([])
        ax.set_yticklabels([])
    if hide_axis:
        plt.axis('off')

    # include colorbar :
    if scale != 'bool' and use_colorbar:
        divider = make_axes_locatable(plt.gca())
        cax = divider.append_axes(colorbar_loc, "5%", pad="3%")
        plt.colorbar(c, cax=cax, format=formatter, ticks=tick_levels)

    plt.title(title)
    if use_colorbar:
        # Leave margin so the appended colorbar axes is not clipped.
        plt.tight_layout(rect=[.03,.03,0.97,0.97])
    else:
        plt.tight_layout()
    plt.savefig( full_path + '.eps', dpi=300)
    if show:
        plt.show()
    plt.close(fig)
def mkplots(self):
    # run to make plots of the resulting posteriors. Modified from marginal_plots.py
    # from pymultinest. Produces basename+marg.pdf and basename+marge.png files
    #
    # NOTE(review): Python 2 code (`print` statements, `file()` builtin).
    # Reads <basename>params.json for parameter names and uses the
    # pymultinest Analyzer output keyed by the same basename prefix.
    # In 2-D mode it saves <basename>marg.pdf/.png; in 1-D mode it saves
    # <basename>marg1d.pdf (one page per parameter).
    prefix = self.basename
    parameters = json.load(file(prefix + 'params.json'))
    n_params = len(parameters)
    a = pymultinest.Analyzer(n_params = n_params, outputfiles_basename = prefix)
    s = a.get_stats()
    p = pymultinest.PlotMarginal(a)
    try:
        values = a.get_equal_weighted_posterior()
    except IOError as e:
        # Posterior file missing/unreadable: warn and bail out quietly.
        print 'Unable to open: %s' % e
        return
    assert n_params == len(s['marginals'])
    modes = s['modes']
    # D env var forces the mode; default is 2-D unless there are >20
    # parameters (a full n x n grid would be huge).
    dim2 = os.environ.get('D', '1' if n_params > 20 else '2') == '2'
    # Fewer histogram bins when there are many parameters (smaller panels).
    nbins = 100 if n_params < 3 else 20
    if dim2:
        # --- 2-D mode: n_params x n_params grid. Diagonal = 1-D
        # marginals, upper triangle = 2-D conditionals.
        plt.figure(figsize=(5.1*n_params, 5*n_params))
        for i in range(n_params):
            plt.subplot(n_params, n_params, i + 1)
            plt.xlabel(parameters[i])

            m = s['marginals'][i]
            plt.xlim(m['5sigma'])

            # Histogram of the equally-weighted posterior samples
            # (grey, on its own y-scale) ...
            oldax = plt.gca()
            x,w,patches = oldax.hist(values[:,i], bins=nbins, edgecolor='grey', color='grey', histtype='stepfilled', alpha=0.2)
            oldax.set_ylim(0, x.max())

            # ... overlaid with the smoothed marginal on a second,
            # frameless axes sharing the same x-axis.
            newax = plt.gcf().add_axes(oldax.get_position(), sharex=oldax, frameon=False)
            p.plot_marginal(i, ls='-', color='blue', linewidth=3)
            newax.set_ylim(0, 1)

            # Draw the median with 1-sigma error bar near the bottom
            # (5% up from the axis floor).
            ylim = newax.get_ylim()
            y = ylim[0] + 0.05*(ylim[1] - ylim[0])
            center = m['median']
            low1, high1 = m['1sigma']
            print center, low1, high1
            newax.errorbar(x=center, y=y,
                xerr=np.transpose([[center - low1, high1 - center]]),
                color='blue', linewidth=2, marker='s')
            oldax.set_yticks([])
            #newax.set_yticks([])
            newax.set_ylabel("Probability")
            ylim = oldax.get_ylim()
            newax.set_xlim(m['5sigma'])
            oldax.set_xlim(m['5sigma'])
            #plt.close()

            # Conditional (i, j) panels for all j < i, with mode
            # positions marked by error bars.
            # NOTE(review): this loop rebinds `m` (previously the
            # marginal dict) to each mode dict — intentional but easy
            # to misread.
            for j in range(i):
                plt.subplot(n_params, n_params, n_params * (j + 1) + i + 1)
                p.plot_conditional(i, j, bins=20, cmap = plt.cm.gray_r)
                for m in modes:
                    plt.errorbar(x=m['mean'][i], y=m['mean'][j], xerr=m['sigma'][i], yerr=m['sigma'][j])
                ax = plt.gca()
                # Only the panel adjacent to the diagonal keeps labels.
                if j == i-1:
                    plt.xlabel(parameters[i])
                    plt.ylabel(parameters[j])
                    [l.set_rotation(45) for l in ax.get_xticklabels()]
                else:
                    ax.set_xticklabels([])
                ax.set_yticklabels([])
                # Zoom to +/- 5 sigma around the last mode's mean
                # (`m` still holds the final element of `modes` here).
                plt.xlim([m['mean'][i]-5*m['sigma'][i],m['mean'][i]+5*m['sigma'][i]])
                plt.ylim([m['mean'][j]-5*m['sigma'][j],m['mean'][j]+5*m['sigma'][j]])
                #plt.savefig('cond_%s_%s.pdf' % (params[i], params[j]), bbox_tight=True)
                #plt.close()
        plt.tight_layout()
        plt.savefig(prefix + 'marg.pdf')
        plt.savefig(prefix + 'marg.png')
        plt.close()
    else:
        # --- 1-D mode: one page per parameter in a multi-page PDF.
        from matplotlib.backends.backend_pdf import PdfPages
        print '1dimensional only. Set the D environment variable D=2 to force'
        print '2d marginal plots.'
        pp = PdfPages(prefix + 'marg1d.pdf')

        for i in range(n_params):
            plt.figure(figsize=(5, 5))
            plt.xlabel(parameters[i])

            m = s['marginals'][i]
            plt.xlim(m['5sigma'])

            # Same histogram + smoothed-marginal overlay as the 2-D
            # diagonal panels above.
            oldax = plt.gca()
            x,w,patches = oldax.hist(values[:,i], bins=20, edgecolor='grey', color='grey', histtype='stepfilled', alpha=0.2)
            oldax.set_ylim(0, x.max())

            newax = plt.gcf().add_axes(oldax.get_position(), sharex=oldax, frameon=False)
            p.plot_marginal(i, ls='-', color='blue', linewidth=3)
            newax.set_ylim(0, 1)

            ylim = newax.get_ylim()
            y = ylim[0] + 0.05*(ylim[1] - ylim[0])
            center = m['median']
            low1, high1 = m['1sigma']
            print center, low1, high1
            newax.errorbar(x=center, y=y,
                xerr=np.transpose([[center - low1, high1 - center]]),
                color='blue', linewidth=2, marker='s')
            oldax.set_yticks([])
            newax.set_ylabel("Probability")
            ylim = oldax.get_ylim()
            newax.set_xlim(m['5sigma'])
            oldax.set_xlim(m['5sigma'])

            plt.savefig(pp, format='pdf', bbox_inches='tight')
            plt.close()
        pp.close()
tic = time.clock() # for k in range(10): posT = flowL.calc_flowline(my_dict['xmins'], my_dict['ymins'], my_dict['xmaxs'], my_dict['ymaxs'], my_dict['Trels'], my_dict['As'], pos0, dt, my_dict['nTimeSteps'], nStepsODEsolver) toc = time.clock() print 'Time:', toc - tic, '[sec]' #sw.toctic("GPU compute") # ipshell('hi') if TF.show: fig = plt.figure() plt.plot(posT[:, 0], posT[:, 1], '.') plt.gca().invert_yaxis() fig.show() # plotting lines_shape = (512, 512) fig = plt.figure() for i in range(lines_shape[0]): plt.plot(posT[i * lines_shape[1]:(i + 1) * lines_shape[1], 0], posT[i * lines_shape[1]:(i + 1) * lines_shape[1], 1]) plt.gca().invert_yaxis() fig.show() raw_input()
# --- Script-level code (Python 2): add noise to the (x, y) data defined
# earlier in the file, plot it, and set up a MonotonicRegression fit.
# `x`, `y`, `of` and `MonotonicRegression` come from outside this chunk.
# Capture the clean data range BEFORE adding noise — used below as the
# regression's target range.
range_start=y.min()
range_end=y.max()

# Add noise
y += 0.4*np.random.standard_normal(y.shape)

if 1:
    plt.figure(0)
    # `of` is presumably a project plotting-utility module — verify.
    of.plt.set_figure_size_and_location(1000,0,1000,500)
    plt.clf()
    plt.subplot(121)
    plt.cla()
    plt.plot(x,y,'.',lw=3)
    plt.title('data')
    ax = plt.gca()
    ax.tick_params(axis='y', labelsize=50)
    ax.tick_params(axis='x', labelsize=30)

# NOTE(review): nPtsDense is set here but not used within this visible
# chunk — presumably consumed further down the script.
nPtsDense = 10000

# Configure the monotonic regression: dense evaluation domain [-10, 10],
# noisy data, and the pre-noise value range as the output range.
mr = MonotonicRegression(base=[12],nLevels=4)
mr.set_dense(domain_start=-10,domain_end=10)
mr.set_data(x=x,y=y,range_start=range_start,range_end=range_end)
print mr