import datetime

import matplotlib.pyplot as plt
from matplotlib.finance import quotes_historical_yahoo_ochl
from matplotlib.dates import YearLocator, MonthLocator, DateFormatter

date1 = datetime.date(1995, 1, 1)
date2 = datetime.date(2004, 4, 12)

years = YearLocator()    # every year
months = MonthLocator()  # every month
yearsFmt = DateFormatter('%Y')

quotes = quotes_historical_yahoo_ochl('INTC', date1, date2)
if len(quotes) == 0:
    raise SystemExit
dates = [q[0] for q in quotes]
opens = [q[1] for q in quotes]

fig, ax = plt.subplots()
ax.plot_date(dates, opens, '-')

# format the ticks
ax.xaxis.set_major_locator(years)
ax.xaxis.set_major_formatter(yearsFmt)
ax.xaxis.set_minor_locator(months)
ax.autoscale_view()


# format the coords message box
def price(x):
    return '$%1.2f' % x
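# Note: the matplotlib.finance module (and its Yahoo quote helpers) was removed
# from matplotlib, so the snippet above no longer runs on current releases.
# Below is a minimal, self-contained sketch of the same tick-formatting pattern
# using synthetic data instead of downloaded quotes; the data and variable names
# here are illustrative only.
import datetime

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.dates import YearLocator, MonthLocator, DateFormatter

# synthetic weekly "open" prices between two dates (placeholder for real quotes)
dates = [datetime.date(1995, 1, 1) + datetime.timedelta(days=i) for i in range(0, 3400, 7)]
opens = 30 + np.cumsum(np.random.default_rng(0).normal(0, 1, len(dates)))

fig, ax = plt.subplots()
ax.plot(dates, opens, '-')

# same tick formatting as the original example
ax.xaxis.set_major_locator(YearLocator())
ax.xaxis.set_major_formatter(DateFormatter('%Y'))
ax.xaxis.set_minor_locator(MonthLocator())
ax.format_ydata = lambda y: '$%1.2f' % y  # coords message box formatter
plt.show()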
# All devices
# ind = inds[2]
# folder = '/Users/admin/Documents/powernet/powernet_markets_mysql/'+run + '_' + ind
# directory = run + '_' + ind + '_vis'
# df_system = pd.read_csv(directory+'/df_system.csv', index_col=[0], parse_dates=True)
# lns = lns + ax.plot(df_system.index, df_system['measured_real_power'], label='25% PV/Batt/EVs')
ax.plot(df_system_fast.index, [0.0] * len(df_system_fast.index), 'k', linewidth=1.0)
ax.set_ylabel('Measured system load [MW]')

start = pd.Timestamp(2015, 7, 15)  # df_system_fast.index[0]
end = pd.Timestamp(2015, 7, 17)    # df_system_fast.index[-1]
ax.set_xlim(xmin=start, xmax=end)
ax.set_ylim(ymin=0.0)
ax.xaxis.set_major_locator(HourLocator(arange(0, 25, 6)))
ax.xaxis.set_minor_locator(HourLocator(arange(0, 25, 3)))
ax.xaxis.set_major_formatter(DateFormatter('%H:%M'))

# Legend
labs = [l.get_label() for l in lns]
L = ax.legend(lns, labs, bbox_to_anchor=(0.5, -0.3), loc='lower center', ncol=len(labs))

ppt.savefig(run + '/11_measuredload_EV_fastslow.png', bbox_inches='tight')
# Pad the space on the top to allow for an annotation below suptitle
fig.suptitle('Daily Reported Cases and Deaths in the US*', fontsize=22)
fig.subplots_adjust(top=0.9)
plt.annotate(f"*Percentage approximate. Updated on {date_str}", (175, 400),
             fontsize=12, xycoords='axes pixels')

# Annotate with the total number of cases and deaths.
ax.annotate(
    'Percent Infected: {:.2f}%\nTotal Cases: {:,}\nTotal Deaths: {:,}'.format(
        percent_infected, usa_cases, usa_deaths),
    (10, 260), fontsize=14, xycoords='axes pixels')

# Format the x-axis dates
date_form = DateFormatter("%b")
ax.xaxis.set_major_formatter(date_form)

# Make axes tick labels the color of the graph
highest_cases, highest_deaths = max(usa_daily.new_cases), max(usa_daily.new_deaths)
ax2.tick_params(axis='y', labelcolor='purple')
ax.set_ylim([0, highest_cases + highest_cases // 10])
ax2.set_ylim([0, highest_deaths + highest_deaths // 10])

# Format the legend and grid
ax.legend(loc='upper left')
ax2.legend(loc='upper left', bbox_to_anchor=(0, 0.92))
ax2.grid(False)


if __name__ == '__main__':
def view(self, eFreqI: int, **kwargs) -> plt.figure: """Plot statistics for evaluation frequency index Plots a simple scatter of each statistic with datetime on the xaxis (datetime of the window start dates). Number of subplots is equal to numStaStatPerWindow. Parameters ---------- eFreqI : int Evaluation frequency index maskwindows : List, np.ndarray Global windows to exclude fig : matplotlib.pyplot.figure, optional A figure object plotfonts : Dict, optional A dictionary of plot fonts label : str, optional Label for the plots clim : List, optional Limits for colourbar axis xlim : List, optional Limits for the x axis ylim : List, optional Limits for the y axis colortitle : str, optional Title for the colourbar legened : bool Boolean flag for adding a legend Returns ------- plt.figure Matplotlib figure object """ # get windows to plot and global dates maskWindows = kwargs["maskwindows"] if "maskwindows" in kwargs else [] plotIndices = self.getMaskedIndices(maskWindows) globalDates = self.getGlobalDates() eFreq = self.evalFreq[eFreqI] # plot params nrows, ncols = self.getRowsCols(self.maxcols) plotfonts = kwargs[ "plotfonts"] if "plotfonts" in kwargs else getViewFonts() fig: plt.figure = (plt.figure(kwargs["fig"].number) if "fig" in kwargs else plt.figure(figsize=(4 * ncols, 5 * nrows))) st = fig.suptitle( "{} data for evaluation frequency: {}".format( self.statName, eFreq), fontsize=plotfonts["suptitle"], ) st.set_y(0.98) # plot the data for idx, val in enumerate(self.winStats): ax = plt.subplot(nrows, ncols, idx + 1) plt.title("Value {}".format(val), fontsize=plotfonts["title"]) label = kwargs["label"] if "label" in kwargs else eFreq # limit the data by plotIndices if not False plotData = np.squeeze(self.stats[:, eFreqI, idx]) plotDates = globalDates if plotIndices: plotData = plotData[plotIndices] plotDates = plotDates[plotIndices] # the colourdata colourbool, colourdata, cmap = self.calcColourData( plotData, val, eFreqI, kwargs) # scatter plot if not colourbool: scat = plt.scatter( plotDates, plotData, edgecolors="none", marker="o", s=12, label=label, ) else: scat = plt.scatter( plotDates, plotData, c=colourdata, edgecolors="none", marker="o", s=12, cmap=cmap, label=label, ) clim = (kwargs["clim"] if ("clim" in kwargs and len(kwargs["clim"]) > 0) else [colourdata.min(), colourdata.max()]) scat.set_clim(clim) # x axis options plt.xlabel("Time", fontsize=plotfonts["axisLabel"]) xlim = (kwargs["xlim"] if ("xlim" in kwargs and len(kwargs["xlim"]) > 0) else [globalDates[0], globalDates[-1]]) plt.xlim(xlim) ax.format_xdata = DateFormatter("%H-%M-%S") fig.autofmt_xdate() # y axis options if "ylim" in kwargs and len(kwargs["ylim"]) > 0: plt.ylim(kwargs["ylim"]) plt.ylabel("Value {}".format(val), fontsize=plotfonts["axisLabel"]) # set tick sizes for label in ax.get_xticklabels() + ax.get_yticklabels(): label.set_fontsize(plotfonts["axisTicks"]) plt.grid(True, ls="--") # legend if "legend" in kwargs and kwargs["legend"]: plt.legend(loc=4) # show if the figure is not in keywords if "fig" not in kwargs: if colourbool: fig.tight_layout(rect=[0.02, 0.02, 0.85, 0.92]) cax = fig.add_axes([0.88, 0.10, 0.03, 0.80]) colourtitle = (kwargs["colortitle"] if "colortitle" in kwargs else "Value") self.addColourbar(scat, cax, colourtitle, plotfonts) else: fig.tight_layout(rect=[0.02, 0.02, 0.98, 0.92]) plt.show() return fig
def TimeBurnDownChart(dots, project):
    """
    Build the work-hours "burn-down" chart.
    :param dots: [['date', y-value], ...]
    :param project: project code
    :return: path of the saved chart file
    """
    global __test
    """Plotting"""
    rcParams.update({
        'font.family': 'sans-serif',
        'font.sans-serif': [u'SimHei'],
        'axes.unicode_minus': False,
        'font.size': 6,
    })
    autodates = AutoDateLocator()
    yearsFmt = DateFormatter('%Y-%m-%d')
    fig = figure(figsize=[10, 6], dpi=120)
    ax = fig.add_subplot(111)
    fig.autofmt_xdate()                      # set the appearance of the x-axis dates
    ax.xaxis.set_major_locator(autodates)    # set the tick interval
    ax.xaxis.set_major_formatter(yearsFmt)   # set the date display format
    """Set the time span to display"""
    _end_date = datetime.date.today() + datetime.timedelta(days=10)
    ax.set_xticks(
        pd.date_range(start='2017-12-10', end='%s' % _end_date, freq='3D'))
    # ax.set_xlim("2017-12-10", "%s" % _end_date)
    # ax.set_ylim(0, total+1)
    _leg = [None, None, None]
    for __dot in dots:
        _spent_lines_dots = {'date': [], 'dot': []}
        _org_lines_dots = {'date': [], 'dot': []}
        for _dot in __dot['dots']:
            if _dot[2] == 'spent':
                _spent_lines_dots['date'].append(_dot[0])
                _spent_lines_dots['dot'].append(_dot[1])
            else:
                _org_lines_dots['date'].append(_dot[0])
                _org_lines_dots['dot'].append(_dot[1])
        _leg[0] = ax.fill_between(_spent_lines_dots['date'], __dot['count'],
                                  _spent_lines_dots['dot'],
                                  facecolor='lightcyan', alpha=0.6)
        _leg[1] = ax.fill_between(_spent_lines_dots['date'],
                                  _spent_lines_dots['dot'], 0,
                                  facecolor='lightpink', alpha=0.7)
        _leg[2] = ax.fill_between(
            _spent_lines_dots['date'], _org_lines_dots['dot'], 0,
            # _spent_lines_dots['dot'],
            facecolor='lightyellow', alpha=0.3)
        # plt.setp(_lines, color='r')
    ax.set_xlabel(u'日期', fontsize=11)   # "Date"
    ax.set_ylabel(u'工时', fontsize=11)   # "Work hours"
    ax.grid(True)
    ax.legend(_leg, [u"规划", u"执行", u"估计"],  # "planned", "spent", "estimated"
              loc=1, fontsize=12)
    """Shade the region covered by each sprint"""
    trans = transforms.blended_transform_factory(ax.transData, ax.transAxes)
    for _p in dots:
        if _p['sprint'][-1] != 'Active':
            _c = 'lightblue'
        else:
            _c = 'lightgreen'
        ax.fill_between(_p['sprint'][:-1], 0, 1, transform=trans,
                        alpha=0.1, color=_c)
        ax.plot(_p['sprint'][:-1], [0, _p['count']],
                color='r', linewidth=1, alpha=0.3)
    plt.title(u'工时燃尽图', fontsize=12)   # "Work-hours burn-down chart"
    plt.subplots_adjust(left=0.08, right=0.98, bottom=0.06, top=0.96)
    _fn = 'D:\\GitHub\\flasky\\app\\static\\images\\%s-time-issue-burndown.png' % project
    if not __test:
        plt.savefig(_fn, dpi=120)
    else:
        plt.show()
    return _fn
def chartSetup(self, refresh_interval=100): plt.style.use('dark_background') fig = plt.figure(num=f"Current Viewer {version}", figsize=(10, 6)) self.ax = plt.axes() ax = self.ax ax.set_title(f"Streaming: {connected_device}", color="white") fig.text(0.2, 0.88, f"CurrentViewer {version}", color="yellow", verticalalignment='bottom', horizontalalignment='center', fontsize=9, alpha=0.7) fig.text(0.89, 0.0, f"github.com/MGX3D/CurrentViewer", color="white", verticalalignment='bottom', horizontalalignment='center', fontsize=9, alpha=0.5) ax.set_ylabel("Current draw (Amps)") ax.set_yscale("log", nonpositive='clip') ax.set_ylim(1e-10, 1e1) plt.yticks([ 1.0e-9, 1.0e-8, 1.0e-7, 1.0e-6, 1.0e-5, 1.0e-4, 1.0e-3, 1.0e-2, 1.0e-1, 1.0 ], [ '1nA', '10nA', '100nA', '1\u00B5A', '10\u00B5A', '100\u00B5A', '1mA', '10mA', '100mA', '1A' ], rotation=0) ax.grid(axis="y", which="both", color="yellow", alpha=.3, linewidth=.5) ax.set_xlabel("Time") plt.xticks(rotation=20) ax.set_xlim(datetime.now(), datetime.now() + timedelta(seconds=10)) ax.grid(axis="x", color="green", alpha=.4, linewidth=2, linestyle=":") #ax.xaxis.set_major_locator(SecondLocator()) ax.xaxis.set_major_formatter(DateFormatter('%H:%M:%S')) def on_xlims_change(event_ax): logging.debug("Interactive zoom: {} .. {}".format( num2date(event_ax.get_xlim()[0]), num2date(event_ax.get_xlim()[1]))) chart_len = num2date(event_ax.get_xlim()[1]) - num2date( event_ax.get_xlim()[0]) if chart_len.total_seconds() < 5: self.ax.xaxis.set_major_formatter(DateFormatter('%H:%M:%S.%f')) else: self.ax.xaxis.set_major_formatter(DateFormatter('%H:%M:%S')) self.ax.xaxis.set_minor_formatter(DateFormatter('%H:%M:%S.%f')) ax.callbacks.connect('xlim_changed', on_xlims_change) lines = ax.plot([], [], label="Current")[0] lastText = ax.text(0.50, 0.95, '', transform=ax.transAxes) statusText = ax.text(0.50, 0.50, '', transform=ax.transAxes) self.anim = animation.FuncAnimation(fig, self.getSerialData, fargs=(lines, plt.legend(), lastText), interval=refresh_interval) plt.legend(loc="upper right", framealpha=0.5) apause = plt.axes([0.91, 0.15, 0.08, 0.07]) self.bpause = Button(apause, label='Pause', color='0.2', hovercolor='0.1') self.bpause.on_clicked(self.pauseRefresh) self.bpause.label.set_color('yellow') aanimation = plt.axes([0.91, 0.25, 0.08, 0.07]) self.bsave = Button(aanimation, 'GIF', color='0.2', hovercolor='0.1') self.bsave.on_clicked(self.saveAnimation) self.bsave.label.set_color('yellow') crs = mplcursors.cursor(ax, hover=True) @crs.connect("add") def _(sel): sel.annotation.arrow_patch.set(arrowstyle="simple", fc="yellow", alpha=.4) sel.annotation.set_text(self.textAmp(sel.target[1])) self.framerate = 1000 / refresh_interval plt.gcf().autofmt_xdate() plt.show()
def plot_iono(df): """ Parameters ---------- df : TYPE DESCRIPTION. Returns ------- None. """ fig = plt.figure(figsize=(8, 4)) fig.suptitle("iono", color="tab:gray") ax = fig.add_subplot(211) ax.plot(df.Na, "-o", color="tab:red", ms=10) # lims = ax.get_xlim() # ax.hlines(135, *lims, colors='r', alpha=0.5, linestyles='dashed') # ax.hlines(145, *lims, colors='r', alpha=0.5, linestyles='dashed') usual = [136, 142] spread = set(usual) | set(ax.get_ylim()) ax.set_ylim(min(spread), max(spread)) ax.set_ylabel("$Na^+$", color="tab:red") ax.spines["left"].set_color("tab:red") ax.tick_params(axis="y", colors="tab:red") for spine in ["top", "right"]: ax.spines[spine].set_visible(False) axT = ax.twinx() axT.plot(df.Cl, "-o", color="tab:blue", ms=10) axT.set_ylabel("$Cl^-$", color="tab:blue") usual = [98, 104] spread = set(usual) | set(axT.get_ylim()) axT.set_ylim(min(spread), max(spread)) # lims = axT.get_xlim() # axT.hlines(110, *lims, colors='b', alpha=0.5, linestyles='dashed') # axT.hlines(95, *lims, colors='b', alpha=0.5, linestyles='dashed') axT.spines["right"].set_color("tab:blue") axT.tick_params(axis="y", colors="tab:blue") for spine in ["top", "left"]: axT.spines[spine].set_visible(False) ax2 = fig.add_subplot(212) ax2.plot(df.K, "-o", color="tab:purple", ms=10) ax2.set_ylabel("$K^+$", color="tab:purple") usual = [2.2, 4] spread = set(usual) | set(ax2.get_ylim()) ax2.set_ylim(min(spread), max(spread)) ax2.spines["left"].set_color("tab:purple") ax2.tick_params(axis="y", colors="tab:purple") for spine in ["top", "right"]: ax2.spines[spine].set_visible(False) ax2T = ax2.twinx() ax2T.plot(df.ph, "-o", color="tab:gray", ms=10) ax2T.set_ylabel("pH", color="tab:gray") usual = [7.34, 7.48] spread = set(usual) | set(ax2T.get_ylim()) ax2.set_ylim(min(spread), max(spread)) ax2T.spines["right"].set_color("tab:gray") ax2T.tick_params(axis="y", colors="tab:gray") for spine in ["top", "left"]: ax2T.spines[spine].set_visible(False) for ax in fig.get_axes(): ax.spines["top"].set_visible(False) if df.index.dtype != "<M8[ns]": ax.xaxis.set_ticks(np.arange(len(df))) # ax.xaxis.set_ticklabels(np.arange(len(df))) ax.xaxis.set_ticklabels(df.heure) else: date_format = DateFormatter("%H:%M") ax.xaxis.set_major_formatter(date_format) ax.spines["bottom"].set_color("tab:gray") ax.tick_params(axis="x", colors="tab:gray") fig.tight_layout() return fig
def main(): model_data_path = Path("/RECH2/huziy/BC-MH/bc_mh_044deg/Diagnostics") # model_data_path = Path("/RECH2/huziy/BC-MH/bc_mh_044deg/Samples") static_data_file = "/RECH2/huziy/BC-MH/bc_mh_044deg/Samples/bc_mh_044deg_198001/pm1980010100_00000000p" r = RPN(static_data_file) fldir = r.get_first_record_for_name("FLDR") faa = r.get_first_record_for_name("FAA") lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec() gc = default_domains.bc_mh_044 cell_manager = CellManager(fldir, nx=fldir.shape[0], ny=fldir.shape[1], lons2d=lons, lats2d=lats, accumulation_area_km2=faa) selected_station_ids = ["06EA002", ] stations = cehq_station.load_from_hydat_db(province="SK", selected_ids=selected_station_ids, natural=None) # (06EA002): CHURCHILL RIVER AT SANDY BAY at (-102.31832885742188,55.52333068847656), accum. area is 212000.0 km**2 # TODO: plot where is this station, compare modelled and observed hydrographs # for s in stations: # assert isinstance(s, cehq_station.Station) # s.latitude += 0.9 # s.longitude -= 0.2 # print(s) station_to_model_point = cell_manager.get_model_points_for_stations(stations, drainaige_area_reldiff_limit=0.8, nneighbours=1) print(station_to_model_point[stations[0]]) station = stations[0] assert isinstance(station, cehq_station.Station) obs_not_corrected = pd.Series(index=station.dates, data=station.values).groupby( by=lambda d: d.replace(day=15)).mean() obs_corrected = pd.read_csv("mh/obs_data/Churchill Historic Monthly Apportionable Flow_06EA002.csv.bak.original", skiprows=2) print(obs_corrected.head()) print(obs_corrected.year.iloc[0], obs_corrected.year.iloc[-1]) date_index = pd.date_range(start=datetime(obs_corrected.year.iloc[0] - 1, 12, 15), end=datetime(obs_corrected.year.iloc[-1], 12, 15), freq="M") date_index = date_index.shift(15, freq=pd.datetools.day) print(date_index) data = np.concatenate([r for r in obs_corrected.values[:, 1:-1]]) factor = date_index.map(lambda d: 1000 / (calendar.monthrange(d.year, d.month)[1] * 24 * 3600)) print(factor[:10]) obs_corrected = pd.Series(index=date_index, data=data * factor) station_to_modelled_data = get_model_data(station_to_model_point, output_path=model_data_path, grid_config=gc, basins_of_interest_shp=default_domains.MH_BASINS_PATH, cell_manager=cell_manager, vname="STFL") modelled_data = station_to_modelled_data[station] fig = plt.figure() ax = obs_corrected.plot(label="obs corrected") obs_not_corrected.plot(label="obs not corrected", ax=ax, color="k") modelled_data.plot(label="CRCM5", ax=ax, color="r") ax.legend(loc="upper left") img_file = img_folder.joinpath("{}_validation_monthly.png".format(station.id)) fig.savefig(str(img_file)) plt.close(fig) # climatology start_year = 1980 end_year = 2010 date_selector = lambda d: (start_year <= d.year <= end_year) and not ((d.month == 2) and (d.day == 29)) fig = plt.figure() ax = obs_corrected.select(date_selector).groupby(lambda d: d.replace(year=2001)).mean().plot(label="obs corrected") obs_not_corrected.select(date_selector).groupby(lambda d: d.replace(year=2001)).mean().plot( label="obs not corrected", ax=ax, color="k") modelled_data.select(date_selector).groupby(lambda d: d.replace(year=2001)).mean().plot(label="CRCM5", ax=ax, color="r") ax.xaxis.set_major_locator(MonthLocator(bymonthday=15)) ax.xaxis.set_major_formatter(DateFormatter("%b")) ax.legend(loc="upper left") img_file = img_folder.joinpath("{}_validation_clim.png".format(station.id)) fig.savefig(str(img_file)) plt.close(fig) # Interannual variability fig = plt.figure() obs_corrected = 
obs_corrected.select(lambda d: start_year <= d.year <= end_year) modelled_data = modelled_data.select(lambda d: start_year <= d.year <= end_year) corr_list = [] for m in range(1, 13): obs = obs_corrected.select(lambda d: d.month == m) mod = modelled_data.select(lambda d: d.month == m) print(obs.head()) obs.index = obs.index.map(lambda d: d.year) mod.index = mod.index.map(lambda d: d.year) corr_list.append(obs.corr(mod)) ax = plt.gca() ax.plot(range(1, 13), corr_list) ax.set_xlabel("Month") ax.set_title("Inter-annual variability") img_file = img_folder.joinpath("{}_interannual.png".format(station.id)) fig.tight_layout() fig.savefig(str(img_file), bbox_inches="tight") plt.close(fig)
period = timedelta(hours=24)
start = tellus_data.start_time
end = start + period

fig, ax = plt.subplots()
count = 0
variable = DATA_CO2
for device_id in tellus_data.sensors:
    data = tellus_data.sensors[device_id]['data']
    date_filter = (data[DATA_TIMESTAMP] >= start) & (data[DATA_TIMESTAMP] < end)
    data = data.loc[date_filter]
    x = data[DATA_TIMESTAMP]
    y = data[variable]
    plt.plot(x, y, label=device_id)
    count = count + 1
    if count > 8:  # limit to 8 plots
        break

# beautify the x-labels
plt.gcf().autofmt_xdate()
myFmt = DateFormatter("%H:%M")
ax.xaxis.set_major_formatter(myFmt)

plt.legend()
plt.xlabel('Time of the day')
plt.ylabel(variable)
plt.title(variable + ' readings on ' + start.strftime('%Y-%m-%d') + ' at ' + room)
plt.show()
fig.savefig(variable + '-' + room + str(start.strftime('-%Y-%m-%d')) + '.png')
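# A minimal, self-contained sketch of an alternative to hand-picked DateFormatter
# strings: matplotlib's AutoDateLocator + ConciseDateFormatter (matplotlib >= 3.1)
# choose compact, readable date tick labels automatically. The synthetic CO2-like
# series below is illustrative only.
from datetime import datetime, timedelta

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates

start = datetime(2020, 1, 1, 0, 0)
times = [start + timedelta(minutes=10 * i) for i in range(144)]  # one day, 10-min steps
values = 400 + 20 * np.sin(np.linspace(0, 4 * np.pi, len(times)))

fig, ax = plt.subplots()
ax.plot(times, values, label="sensor-1")

locator = mdates.AutoDateLocator()
ax.xaxis.set_major_locator(locator)
ax.xaxis.set_major_formatter(mdates.ConciseDateFormatter(locator))

ax.set_xlabel("Time of the day")
ax.set_ylabel("CO2 (ppm)")
ax.legend()
plt.show()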
# Plot histogram of percent change
fig = stl.plot_histogram(percent_change, location="Singapore")
plt.show()

# Plot full study area time series mean
with plt.style.context("dark_background"):
    fig, ax = plt.subplots(figsize=(20, 10))
    ax.scatter(timestamps_arr, means_arr, facecolor="#ff7f00")
    ax.set_title("Singapore Mean NO2, Full Study Area, Jul 2018 - Jul 2020", size=24)
    ax.set_xlabel("Date", size=20)
    ax.set_ylabel(r"Mean NO2 ($\mathrm{mol \cdot m^{-2}}$)", size=24)
    plt.xticks(fontsize=20)
    plt.yticks(fontsize=20)
    date_form = DateFormatter("%b-%Y")
    ax.xaxis.set_major_formatter(date_form)
    fig.text(
        0.5,
        0.025,
        "Data Source: European Space Agency",
        ha="center",
        fontsize=16,
    )
    plt.show()

# ===========
# DATA EXPORT
# ===========

# Export 2019 to 2020 change
def plot_monthly(series_dates, series_record, tbl_name, start_date, end_date, style='fivethirtyeight', save=True): series_dates = pd.to_datetime(series_dates, format="%Y-%m-%d %H:%M:%S") # date_list is require to create x-axis dates_list = [] for i in series_dates: dates_list.append(i) # date1 = dates_list[0] # date2 = dates_list[len(dates_list)-1] # print(dates_list[0]) # Best fit line df_temp = pd.DataFrame() df_temp['days_since'] = ( series_dates - pd.to_datetime(dates_list[0])).astype('timedelta64[D]') lr = LinearRegression() # print(len(series_dates)) # print(len(df_temp['days_since'])) # print(len(series_record)) lr.fit(df_temp[[ 'days_since', ]], series_record) predict = lr.predict(df_temp[[ 'days_since', ]]) # Styling needs to be at top. plt.style.use(style) plt.rcParams['font.family'] = 'serif' plt.rcParams['font.serif'] = 'Ubuntu' plt.rcParams['font.monospace'] = 'Ubuntu Mono' plt.rcParams['font.size'] = 10 plt.rcParams['axes.labelsize'] = 10 plt.rcParams['axes.labelweight'] = 'bold' plt.rcParams['axes.titlesize'] = 18 plt.rcParams['axes.titleweight'] = 'bold' plt.rcParams['xtick.labelsize'] = 8 plt.rcParams['ytick.labelsize'] = 8 plt.rcParams['legend.fontsize'] = 10 plt.rcParams['figure.titlesize'] = 12 # Main part fig = plt.figure() ax = fig.add_subplot(111) print(dates_list) ax.plot(dates_list, series_record, c='#DCD6F7', alpha=0.3 ) #dashes=(1,10), dash_capstyle='round', dash_joinstyle='round' ax.scatter(dates_list, series_record, c='#4E4C67', s=20, alpha=0.8) ax.plot(dates_list, predict, c='#A6B1E1', solid_capstyle='round') #print(plt.style.available) # sns.lmplot(dates_list, series_record, data=series_record, fit_reg=True) # plt.gcf().autofmt_xdate() # Intelligently scaling x-axis labels # If it's under 3 month if len(series_dates) < 110: years = YearLocator() # every year months = MonthLocator(interval=1) # every month days = DayLocator(bymonthday=range(1, 31, 7)) #, interval=5 loc = WeekdayLocator(byweekday=MO) dateFmt_Maj = DateFormatter('%d-%m-%Y %a') dateFmt_Min = DateFormatter('%d') # format the ticks ax.xaxis.set_major_locator(loc) ax.xaxis.set_major_formatter(dateFmt_Maj) ax.xaxis.set_minor_locator(days) ax.xaxis.set_minor_formatter(dateFmt_Min) # ax.autoscale_view() #if it's over 3 month and under year (ish) elif len(series_dates) >= 110 and len(series_dates) < 350: years = YearLocator() # every year months = MonthLocator(interval=1) # every month days = DayLocator(bymonthday=range(7, 31, 7)) #, interval=5 dateFmt_Maj = DateFormatter('%d-%m') dateFmt_Min = DateFormatter('%d') # format the ticks ax.xaxis.set_major_locator(months) ax.xaxis.set_major_formatter(dateFmt_Maj) ax.xaxis.set_minor_locator(days) ax.xaxis.set_minor_formatter(dateFmt_Min) else: years = YearLocator() # every year months = MonthLocator(interval=3) # every month #days = DayLocator(bymonthday=range(1,31,7)) #, interval=5 loc = WeekdayLocator(byweekday=MO) dateFmt_Maj = DateFormatter('%Y') dateFmt_Min = DateFormatter('%m') # format the ticks ax.xaxis.set_major_locator(years) ax.xaxis.set_major_formatter(dateFmt_Maj) ax.xaxis.set_minor_locator(months) ax.xaxis.set_minor_formatter(dateFmt_Min) # Intelligently plot y-axis range lim_list = set_min_max_record(tbl_name, series_record.name, start_date, end_date) # print('lim_list'.format(lim_list)) # print(start_date) # print(type(start_date)) # print(end_date) ax.set_ylim(lim_list) # Plot x-range so it shows whole of x-axis instead of streching from where data is available. days = 1 # So edge values of x-axis doesn't get cut. 
start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d") start_date = start_date + relativedelta.relativedelta(days=-days) # print(start_date) # print(type(start_date)) end_date = datetime.datetime.strptime(end_date, "%Y-%m-%d") end_date = end_date + relativedelta.relativedelta(days=+days) ax.set_xlim([start_date, end_date]) # name axis ax.set_xlabel(removeUnderLine(series_dates.name), color='#5c5f6d') ax.set_ylabel(removeUnderLine(series_record.name), color='#5c5f6d') ax.set_title(removeUnderLine(tbl_name), color='#5c5f6d') #'#B4869F' # def price(x): # return '$%1.2f' % x # ax.fmt_xdata = DateFormatter('%Y-%m-%d') # ax.fmt_ydata = price # ax.grid(True) fig.autofmt_xdate() if save: # Format file name to save file_date_start = datetime.datetime.strftime(series_dates.iloc[0], '%Y-%m-%d') file_date_end = datetime.datetime.strftime( series_dates.iloc[len(series_dates) - 1], '%Y-%m-%d') fil_name = '{}_{}_to_{}_a01.png'.format(tbl_name, file_date_start, file_date_end) print(fil_name) plt.savefig(fil_name) plt.show()
def plotcsv(tipo, columna, csvPath, csvName, pngPath, pngName, pathLogo, estacion, extraDhi=0, extraDni=0): # Read data from 'file.dat' tabla = numpy.genfromtxt( csvPath + csvName, # Data to be read delimiter=',', names=True, dtype=None) tabla = numpy.unique(tabla) tabla = numpy.sort(tabla) # imprimo el cabezal de las tablas para tenerlas como referencia en caso de error print tabla.dtype.names # guardo las distintas columnas en listas independientes col1 = tabla['Timestamp'] col2 = tabla[columna] if tipo == 'uv': coliuv = tabla['IUV_AV'] if extraDhi: extraColDhi = tabla[extraDhi] if extraDni: extraColDni = tabla[extraDni] # casteo los timestamps a strings fecha = [ dt.datetime.strptime(date, '%Y/%m/%d %H:%M:%S.%f') for date in col1 ] # en las tablas pueden haber literales dato = [] bag = [] error_list = [] j = 0 for n in col2: if n == 'UnderRange' or n == 'OverRange' or n == 'NotYetSet': dato.append(-2.) error_list.append(j) else: dato.append(float(n)) bag.append(float(n)) j += 1 # if fig = plt.figure() ax = fig.add_subplot(111) plt.xticks(fontsize=7) plt.yticks(fontsize=5) # hours = HourLocator(byhour=range(2,23), interval=6) hours = HourLocator(byhour=[6, 12, 18]) ax.xaxis.set_major_locator(DayLocator(bymonthday=range(1, 32))) ax.xaxis.set_major_formatter(FuncFormatter(my_format_function)) ax.xaxis.set_minor_locator(hours) ax.xaxis.set_minor_formatter(DateFormatter("%Hhs")) ax.tick_params(which='minor', labelsize=4.5) # ---------------------- # seteo fin para el dia de ayer con ultima hora minuto y segundo fin = dt.datetime.now() - dt.timedelta(days=1) fin = fin.replace(hour=23) fin = fin.replace(minute=59) fin = fin.replace(second=59) # el comienzo del rango es el dia fin menos 7199 minutos ini = fin - dt.timedelta(minutes=7199) # parche por las diferencias de los timestamps de las medidas fin = fin - dt.timedelta(hours=1) print fin new_fecha = [] new_dato = [] err_dato = [] print estacion print columna if (len(col2) != len(error_list)): print numpy.floor(numpy.amin(bag)) print numpy.ceil(numpy.amax(bag)) if tipo == 'temp': # tmin = numpy.floor(numpy.amin(bag)) # tmax = numpy.ceil(numpy.amax(bag)) # min_dato = tmin - 1 # <-- # err = min_dato - (tmax - min_dato) / 44 err = -6. min_dato = -5.1 elif tipo == 'ghi': err = -32. min_dato = -2. elif tipo == 'dhi': err = -18. min_dato = -2. elif tipo == 'dni': err = -32. min_dato = -2. elif tipo == 'uv': err = -2. min_dato = 0. 
# if hay_error = False # float('nan') i = 0 if fecha[i] < ini: # adelantar i hasta inicio while i < len(fecha) and fecha[i].strftime( "%Y/%m/%d %H") < ini.strftime("%Y/%m/%d %H"): i += 1 # while # if for minutal in rrule(MINUTELY, dtstart=ini, until=fin): # si existe la entrada de fecha en el csv new_fecha.append(minutal) if i < len(fecha) and minutal.strftime( "%Y/%m/%d %H:%M") == fecha[i].strftime("%Y/%m/%d %H:%M"): if i in error_list: new_dato.append(float('nan')) err_dato.append(err) hay_error = True else: new_dato.append(dato[i]) err_dato.append(min_dato) i += 1 else: # si no existe esa fecha en el csv new_dato.append(float('nan')) err_dato.append(err) hay_error = True # ####################### i = 0 if fecha[i] < ini: # adelantar i hasta inicio while i < len(fecha) and fecha[i].strftime( "%Y/%m/%d %H") < ini.strftime("%Y/%m/%d %H"): i += 1 # while # if j = i if tipo == 'uv': new_iuv = [] for minutal in rrule(MINUTELY, dtstart=ini, until=fin): # si existe la entrada de fecha en el csv if i < len(fecha) and minutal.strftime( "%Y/%m/%d %H:%M") == fecha[i].strftime("%Y/%m/%d %H:%M"): if i in error_list: new_iuv.append(min_dato) else: new_iuv.append(coliuv[i]) i += 1 else: # si no existe esa fecha en el csv new_iuv.append(min_dato) if extraDhi: newExtraDhi = [] for minutal in rrule(MINUTELY, dtstart=ini, until=fin): # si existe la entrada de fecha en el csv if i < len(fecha) and minutal.strftime( "%Y/%m/%d %H:%M") == fecha[i].strftime("%Y/%m/%d %H:%M"): if i in error_list: newExtraDhi.append(min_dato) else: newExtraDhi.append(extraColDhi[i]) i += 1 else: # si no existe esa fecha en el csv newExtraDhi.append(min_dato) if extraDni: newExtraDni = [] for minutal in rrule(MINUTELY, dtstart=ini, until=fin): # si existe la entrada de fecha en el csv if j < len(fecha) and minutal.strftime( "%Y/%m/%d %H:%M") == fecha[j].strftime("%Y/%m/%d %H:%M"): if j in error_list: newExtraDni.append(min_dato) else: newExtraDni.append(extraColDni[j]) j += 1 else: # si no existe esa fecha en el csv newExtraDni.append(min_dato) # ---------------------- if tipo == 'temp': # if hay_error: # vmin = numpy.floor(numpy.amin(new_dato)) # else: # vmin = numpy.floor(numpy.amin(new_dato) - 1) # # if # vmax = numpy.ceil(numpy.amax(new_dato) + 1) vmin = -5 vmax = 50 elif tipo == 'ghi': vmin = 0 vmax = 1400 elif tipo == 'dhi': vmin = 0 vmax = 800 elif tipo == 'dni': vmin = 0 vmax = 1400 elif tipo == 'uv': vmin = 0 vmax = 100 # if if tipo == 'temp': ylabel = u'Temperatura (ºC)' ax.plot_date(new_fecha, new_dato, ls='-', marker="", linewidth=1, color='red', clip_on=False) if hay_error: # ax.plot_date(new_fecha, err_dato, ls='-', marker="", linewidth=1, color='red', clip_on=False) ax.fill_between(new_fecha, err_dato, min_dato, linewidth=0, facecolor='red', alpha=1, clip_on=False, zorder=-100) elif tipo == 'uv': ylabel = u'Irradiancia UVA (W/m²)' ax.plot_date(new_fecha, new_dato, ls='-', marker="", linewidth=0.7, color='#7F00FF', clip_on=False) ax.fill_between(new_fecha, 0, new_dato, linewidth=0, facecolor='#b571fa', alpha=0.7, clip_on=False, zorder=-100) if hay_error: # ax.plot_date(new_fecha, err_dato, ls='-', marker="", linewidth=1, color='red', clip_on=False) ax.fill_between(new_fecha, err_dato, 0, linewidth=0, facecolor='red', alpha=1, clip_on=False, zorder=-100) ax_iuv = ax.twinx() max = numpy.ceil(numpy.amax(new_iuv) + 1) ax_iuv.set_ylim([0, max]) ax_iuv.plot_date(new_fecha, new_iuv, ls='-', marker="", linewidth=0.5, color='red', clip_on=True, zorder=-100) ax_iuv.set_ylabel(u"Índice UV", fontsize=7) for label in 
ax_iuv.yaxis.get_majorticklabels(): label.set_fontsize(5) elif extraDhi or extraDni: ylabel = u'Irradiancia (W/m²)' ax.plot_date(new_fecha, new_dato, ls='-', marker="", linewidth=0.7, color='#3737FF', clip_on=False) # ax.fill_between(new_fecha, 0, new_dato, linewidth=0, facecolor='#b571fa', alpha=0.4, clip_on=False, zorder=-100) if hay_error: # ax.plot_date(new_fecha, err_dato, ls='-', marker="", linewidth=1, color='red', clip_on=False) ax.fill_between(new_fecha, err_dato, 0, linewidth=0, facecolor='red', alpha=1, clip_on=False, zorder=-100) else: ylabel = u'Irradiancia (W/m²)' ax.plot_date(new_fecha, new_dato, ls='-', marker="", linewidth=0.7, color='#3737ff', clip_on=False) ax.fill_between(new_fecha, 0, new_dato, linewidth=0, facecolor='#bfbfff', alpha=0.7, clip_on=False, zorder=-100) if hay_error: # ax.plot_date(new_fecha, err_dato, ls='-', marker="", linewidth=1, color='red', clip_on=False) ax.fill_between(new_fecha, err_dato, 0, linewidth=0, facecolor='red', alpha=1, clip_on=False, zorder=-100) # if if extraDni: ax_dni = ax.twinx() ax_dni.set_ylim([0, vmax]) ax_dni.plot_date(new_fecha, newExtraDni, ls='-', marker="", linewidth=0.7, color='green', alpha=0.5, clip_on=True, zorder=-100) plt.setp(ax_dni.get_yticklabels(), visible=False) if extraDhi: ax_dhi = ax.twinx() ax_dhi.set_ylim([0, vmax]) ax_dhi.plot_date(new_fecha, newExtraDhi, ls='-', marker="", linewidth=0.7, color='red', alpha=0.5, clip_on=True, zorder=-100) plt.setp(ax_dhi.get_yticklabels(), visible=False) ax.set_title(estacion.decode('utf-8'), fontsize=8) ax.set_xlabel('Fecha', fontsize=7) ax.set_ylabel(ylabel, fontsize=7) ax.grid(True) ax.set_axisbelow(True) ax.set_ylim([vmin, vmax]) # agrego el logo en el documento logo = plt.imread(pathLogo) plt.figimage(logo, 0, 370) if tipo == 'ghi': pie = u"Irradiancia global horizontal" elif tipo == 'dhi': pie = u"Irradiancia difusa horizontal" elif tipo == 'dni': pie = u"Irradiancia directa normal" elif tipo == 'temp': pie = u"Temperatura de aire ambiente" elif tipo == 'uv': pie = u"Irradiancia global horizontal UVA e índice UV" # if if extraDhi or extraDni: fileName = "priv_" + pngName else: fileName = pngName # genero el pie de la imagen, con el logo y la info del archivo if extraDhi or extraDni: plt.annotate("GHI", (0, 0), (-30, -15), color='#3737FF', xycoords='axes fraction', textcoords='offset points', va='top', fontsize=7, family='monospace') if extraDhi: plt.annotate("DHI", (0, 0), (-10, -15), color="red", xycoords='axes fraction', textcoords='offset points', va='top', fontsize=7, family='monospace') if extraDni: plt.annotate("DNI", (0, 0), (10, -15), color="green", xycoords='axes fraction', textcoords='offset points', va='top', fontsize=7, family='monospace') else: plt.annotate(pie, (0, 0), (-30, -15), xycoords='axes fraction', textcoords='offset points', va='top', fontsize=7, family='monospace') # guardo la imagen en la ruta destino fig.set_figheight(1.8) fig.set_figwidth(8) plt.savefig(pngPath + fileName, bbox_inches='tight', dpi=200, transparent=True) plt.close() # cierro el archivo
Author  : Yuxing Yan
Date    : 12/26/2013
email   : [email protected]
          [email protected]
"""
import datetime

import matplotlib.pyplot as plt
from matplotlib.finance import quotes_historical_yahoo
from matplotlib.dates import MonthLocator, WeekdayLocator, DateFormatter, MONDAY

ticker = 'AAPL'
begdate = datetime.date(2012, 1, 2)
enddate = datetime.date(2013, 12, 4)

months = MonthLocator(range(1, 13), bymonthday=1, interval=3)  # every 3rd month
mondays = WeekdayLocator(MONDAY)  # minor ticks on Mondays (definition was missing)
monthsFmt = DateFormatter("%b '%Y")

x = quotes_historical_yahoo(ticker, begdate, enddate)
if len(x) == 0:
    print('Found no quotes')
    raise SystemExit

dates = [q[0] for q in x]
closes = [q[4] for q in x]

fig, ax = plt.subplots()
ax.plot_date(dates, closes, '-')
ax.xaxis.set_major_locator(months)
ax.xaxis.set_major_formatter(monthsFmt)
ax.xaxis.set_minor_locator(mondays)
ax.autoscale_view()
ax.grid(True)
fig.autofmt_xdate()
plt.show()
# RC_7_3: Fetch data online and draw a candlestick (K-line) chart
import matplotlib.pyplot as plt
from matplotlib.dates import DateFormatter
from matplotlib.finance import quotes_historical_yahoo_ohlc, candlestick_ohlc

# Start/end dates for the download, date format and ticker symbol
start = (2016, 4, 1)
end = (2016, 4, 25)
weekFormatter = DateFormatter('%b %d')  # e.g., Jan 03 2016
quotes = quotes_historical_yahoo_ohlc('AAPL', start, end)

# Exit if the fetched data is empty
if len(quotes) == 0:
    raise SystemExit

# Set up the formatting of the plotting area
fig, ax = plt.subplots()
ax.xaxis_date()
plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')

# Draw the candlestick chart and show it
candlestick_ohlc(ax, quotes, width=0.6)
plt.show()
def pandas_candlestick_ohlc(dat, stick="day", otherseries=None): """ :param dat: pandas DataFrame object with datetime64 index, and float columns "Open", "High", "Low", and "Close", likely created via DataReader from "yahoo" :param stick: A string or number indicating the period of time covered by a single candlestick. Valid string inputs include "day", "week", "month", and "year", ("day" default), and any numeric input indicates the number of trading days included in a period :param otherseries: An iterable that will be coerced into a list, containing the columns of dat that hold other series to be plotted as lines This will show a Japanese candlestick plot for stock data stored in dat, also plotting other series if passed. """ mondays = WeekdayLocator(MONDAY) # major ticks on the mondays alldays = DayLocator() # minor ticks on the days dayFormatter = DateFormatter('%d') # e.g., 12 # Create a new DataFrame which includes OHLC data for each period specified by stick input transdat = dat.loc[:, ["Open", "High", "Low", "Close"]] if (type(stick) == str): if stick == "day": plotdat = transdat stick = 1 # Used for plotting elif stick in ["week", "month", "year"]: if stick == "week": transdat["week"] = pd.to_datetime(transdat.index).map( lambda x: x.isocalendar()[1]) # Identify weeks elif stick == "month": transdat["month"] = pd.to_datetime(transdat.index).map( lambda x: x.month) # Identify months transdat["year"] = pd.to_datetime(transdat.index).map( lambda x: x.isocalendar()[0]) # Identify years grouped = transdat.groupby(list(set( ["year", stick]))) # Group by year and other appropriate variable plotdat = pd.DataFrame({ "Open": [], "High": [], "Low": [], "Close": [] }) # Create empty data frame containing what will be plotted for name, group in grouped: plotdat = plotdat.append( pd.DataFrame( { "Open": group.iloc[0, 0], "High": max(group.High), "Low": min(group.Low), "Close": group.iloc[-1, 3] }, index=[group.index[0]])) if stick == "week": stick = 5 elif stick == "month": stick = 30 elif stick == "year": stick = 365 elif (type(stick) == int and stick >= 1): transdat["stick"] = [ np.floor(i / stick) for i in range(len(transdat.index)) ] grouped = transdat.groupby("stick") plotdat = pd.DataFrame({ "Open": [], "High": [], "Low": [], "Close": [] }) # Create empty data frame containing what will be plotted for name, group in grouped: plotdat = plotdat.append( pd.DataFrame( { "Open": group.iloc[0, 0], "High": max(group.High), "Low": min(group.Low), "Close": group.iloc[-1, 3] }, index=[group.index[0]])) else: raise ValueError( 'Valid inputs to argument "stick" include the strings "day", "week", "month", "year", or a positive integer' ) # Set plot parameters, including the axis object ax used for plotting fig, ax = plt.subplots() fig.subplots_adjust(bottom=0.2) if plotdat.index[-1] - plotdat.index[0] < pd.Timedelta('730 days'): weekFormatter = DateFormatter('%b %d') # e.g., Jan 12 ax.xaxis.set_major_locator(mondays) ax.xaxis.set_minor_locator(alldays) else: weekFormatter = DateFormatter('%b %d, %Y') ax.xaxis.set_major_formatter(weekFormatter) ax.grid(True) # Create the candelstick chart candlestick_ohlc(ax, list( zip(list(date2num(plotdat.index.tolist())), plotdat["Open"].tolist(), plotdat["High"].tolist(), plotdat["Low"].tolist(), plotdat["Close"].tolist())), colorup="black", colordown="red", width=stick * .4) # Plot other series (such as moving averages) as lines if otherseries != None: if type(otherseries) != list: otherseries = [otherseries] dat.loc[:, otherseries].plot(ax=ax, lw=1.3, grid=True) 
ax.xaxis_date() ax.autoscale_view() plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right') plt.show()
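# candlestick_ohlc lived in matplotlib.finance, which has been removed from
# matplotlib; the standalone mplfinance package is its successor. Below is a
# minimal sketch (assuming mplfinance is installed) that draws a candlestick
# chart from a synthetic OHLC DataFrame; the data and names are illustrative only.
import numpy as np
import pandas as pd
import mplfinance as mpf

# build a small synthetic OHLC frame indexed by business days
idx = pd.date_range("2020-01-01", periods=30, freq="B")
rng = np.random.default_rng(1)
close = 100 + np.cumsum(rng.normal(0, 1, len(idx)))
open_ = close + rng.normal(0, 0.5, len(idx))
df = pd.DataFrame({
    "Open": open_,
    "High": np.maximum(open_, close) + 0.5,
    "Low": np.minimum(open_, close) - 0.5,
    "Close": close,
}, index=idx)

# mplfinance handles the date-axis formatting itself
mpf.plot(df, type="candle", mav=(5,), title="Synthetic OHLC")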
import sqlite3
import datetime as dt  # was imported via matplotlib.dates; import the stdlib module directly
import time

from matplotlib import pyplot as plt
from matplotlib.dates import DateFormatter

formatter = DateFormatter('%H-%M')
conn = sqlite3.connect('25_10_18.2.db')
cur = conn.cursor()

data = []
symbols = []
symbols2 = ['CRUDEOIL18NOVFUT']
# symbols2 = ['USDINR18OCTFUT', 'CRUDEOIL18NOVFUT', 'BANKNIFTY18OCTFUT', 'NIFTY18OCTFUT']

ltp_nifty50 = 10443.95  # kite.ltp(256265)['256265']['last_price']
ltp_niftybank = 25177   # kite.ltp(260105)['260105']['last_price']
strike_nifty50 = 100 * int(ltp_nifty50 / 100) - 400
strike_niftybank = 100 * int(ltp_niftybank / 100) - 400

for i in range(8):
    symbols.append("NIFTY18OCT" + str(strike_nifty50 + (100 * i)) + "CE")
    symbols.append("NIFTY18OCT" + str(strike_nifty50 + (100 * i)) + "PE")
    symbols.append("BANKNIFTY18OCT" + str(strike_niftybank + (100 * i)) + "CE")
    symbols.append("BANKNIFTY18OCT" + str(strike_niftybank + (100 * i)) + "PE")

symbols.append("NIFTY18OCTFUT")
symbols.append("BANKNIFTY18OCTFUT")
symbols.append("BANKNIFTY18OCT25400PE")
symbols.append("BANKNIFTY18OCT25200CE")
if __name__ == "__main__":
    register_matplotlib_converters()

    # Read the data
    ssec2015 = pd.read_csv("ssec2015.csv")
    ssec2015 = ssec2015.iloc[:, 1:]
    print(ssec2015.head(n=3))
    print(ssec2015.tail())
    ssec2015.Date = [date2num(datetime.strptime(date, "%Y-%m-%d")) for date in ssec2015.Date]
    ssec2015list = list()
    for i in range(len(ssec2015)):
        ssec2015list.append(ssec2015.iloc[i, :])
    print(ssec2015list[0:10])

    ax = plt.subplot()
    mondays = WeekdayLocator(MONDAY)
    weekFormatter = DateFormatter("%y %b %d")
    ax.xaxis.set_major_locator(mondays)
    ax.xaxis.set_major_locator(DayLocator())  # note: this overrides the Monday locator set just above
    ax.xaxis.set_major_formatter(weekFormatter)
    ax.set_title("SH2015")
    mf.candlestick_ohlc(ax, ssec2015list, width=0.7, colorup="r", colordown="g")
    # plt.setp(plt.gca().get_xticklabels(), rotation=50, horizontalalignment="center")
    plt.savefig("SH2015-candle.png")

    # Momentum trading
    print("动量交易策略")  # "Momentum trading strategy"
    Wanke = pd.read_csv("Vanke.csv")
    Wanke.index = Wanke.iloc[:, 1]
    Wanke.index = pd.to_datetime(Wanke.index, format="%Y-%m-%d")
    Wanke = Wanke.iloc[:, 2:]
    print(Wanke.head())
import sys
from datetime import date

import matplotlib.pyplot as plt
import numpy as np
# the imports below were missing from the original snippet
from matplotlib.dates import DayLocator, MonthLocator, DateFormatter
from matplotlib.finance import quotes_historical_yahoo

today = date.today()
start = (today.year - 1, today.month, today.day)

symbol = 'DISH'
if len(sys.argv) == 2:
    symbol = sys.argv[1]

quotes = quotes_historical_yahoo(symbol, start, today)
quotes = np.array(quotes)
dates = quotes.T[0]
volume = quotes.T[5]

alldays = DayLocator()
months = MonthLocator()
month_formatter = DateFormatter("%b %Y")

fig = plt.figure()
ax = fig.add_subplot(111)
plt.semilogy(dates, volume)
ax.xaxis.set_major_locator(months)
ax.xaxis.set_minor_locator(alldays)
ax.xaxis.set_major_formatter(month_formatter)
fig.autofmt_xdate()
plt.show()
# Observed mesh (thin subplot on bottom)
mw = ax2.pcolormesh(a['DATETIME'], range(3),
                    [a[S['MW var']], a[S['MW var']]],
                    cmap=S['cmap'], vmax=hmax, vmin=hmin)
ax2.axes.yaxis.set_ticklabels([])
ax2.set_yticks([])
ax2.set_ylabel('Observed')
ax2.set_xlim(hovmoller['valid_1d+'][0], hovmoller['valid_1d+'][-1])

# ax1.grid()
ax2.grid()

# fig.subplots_adjust(hspace=0, right=0.8)
cbar_ax = fig.add_axes([0.82, 0.15, 0.02, 0.7])
cb = fig.colorbar(hv, cax=cbar_ax)
cb.ax.set_ylabel('%s (%s)' % (s, S['units']))

# ax1.xaxis.set_major_locator(HourLocator(byhour=[0, 3, 6, 9, 12, 15, 18, 21]))
ax2.xaxis.set_major_locator(HourLocator(byhour=[0, 3, 6, 9, 12, 15, 18, 21]))
dateFmt = DateFormatter('%b %d\n%H:%M')
ax2.xaxis.set_major_formatter(dateFmt)

# ax2.set_xlabel(r'Contour shows difference between maximum value in %s km$\mathregular{^{2}}$ box centered at %s %s and the value at the center point.' \
#     % (half_box*6, locations[stn]['latitude'], locations[stn]['longitude']), fontsize=8)

# plt.savefig(SAVE + '%s_%s.png' % (stn, S['MW var']))
coviddaily.sample(2).T

# calculate new cases and deaths by day
coviddailytotals = coviddaily.loc[coviddaily.casedate.between('2020-02-01', '2020-07-12')].\
    groupby(['casedate'])[['new_cases', 'new_deaths']].\
    sum().\
    reset_index()
coviddailytotals.sample(7)

# show line charts for new cases and new deaths by day
fig = plt.figure()
plt.suptitle("New Covid Cases and Deaths By Day Worldwide in 2020")
ax1 = plt.subplot(2, 1, 1)
ax1.plot(coviddailytotals.casedate, coviddailytotals.new_cases)
ax1.xaxis.set_major_formatter(DateFormatter("%b"))
ax1.set_xlabel("New Cases")
ax2 = plt.subplot(2, 1, 2)
ax2.plot(coviddailytotals.casedate, coviddailytotals.new_deaths)
ax2.xaxis.set_major_formatter(DateFormatter("%b"))
ax2.set_xlabel("New Deaths")
plt.tight_layout()
fig.subplots_adjust(top=0.88)
plt.show()

# calculate new cases and new deaths by region and day
regiontotals = coviddaily.loc[coviddaily.casedate.between('2020-02-01', '2020-07-12')].\
    groupby(['casedate', 'region'])[['new_cases', 'new_deaths']].\
    sum().\
    reset_index()
regiontotals.sample(7)
def pollyxt_ift_displayLTLCali(tmpFile, saveFolder): ''' Description ----------- Display the housekeeping data from laserlogbook file. Parameters ---------- tmpFile: str the .mat file which stores the housekeeping data. saveFolder: str Usage ----- pollyxt_ift_displayLTLCali(tmpFile) History ------- 2019-01-10. First edition by Zhenping ''' if not os.path.exists(tmpFile): print('{filename} does not exists.'.format(filename=tmpFile)) return # read matlab .mat data try: mat = spio.loadmat(tmpFile, struct_as_record=True) figDPI = mat['figDPI'][0][0] flagWatermarkOn = mat['flagWatermarkOn'][0][0] if mat['partnerLabel'].size: partnerLabel = mat['partnerLabel'][0] else: partnerLabel = '' if mat['LCTime355'].size: thisLCTime355 = mat['LCTime355'][0][:] else: thisLCTime355 = np.array([]) if mat['LCTime532'].size: thisLCTime532 = mat['LCTime532'][0][:] else: thisLCTime532 = np.array([]) if mat['LCTime1064'].size: thisLCTime1064 = mat['LCTime1064'][0][:] else: thisLCTime1064 = np.array([]) if mat['LCTime387'].size: thisLCTime387 = mat['LCTime387'][0][:] else: thisLCTime387 = np.array([]) if mat['LCTime607'].size: thisLCTime607 = mat['LCTime607'][0][:] else: thisLCTime607 = np.array([]) if mat['LC355Status'].size: LC355Status = mat['LC355Status'][0][:] else: LC355Status = np.array([]) if mat['LC532Status'].size: LC532Status = mat['LC532Status'][0][:] else: LC532Status = np.array([]) if mat['LC1064Status'].size: LC1064Status = mat['LC1064Status'][0][:] else: LC1064Status = np.array([]) if mat['LC387Status'].size: LC387Status = mat['LC387Status'][0][:] else: LC387Status = np.array([]) if mat['LC607Status'].size: LC607Status = mat['LC607Status'][0][:] else: LC607Status = np.array([]) if mat['LC355History'].size: LC355History = mat['LC355History'][0][:] else: LC355History = np.array([]) if mat['LC532History'].size: LC532History = mat['LC532History'][0][:] else: LC532History = np.array([]) if mat['LC1064History'].size: LC1064History = mat['LC1064History'][0][:] else: LC1064History = np.array([]) if mat['LC387History'].size: LC387History = mat['LC387History'][0][:] else: LC387History = np.array([]) if mat['LC607History'].size: LC607History = mat['LC607History'][0][:] else: LC607History = np.array([]) if mat['logbookTime'].size: thisLogbookTime = mat['logbookTime'][0][:] else: thisLogbookTime = np.array([]) if mat['flagOverlap'].size: flagOverlap = mat['flagOverlap'][0][:] else: flagOverlap = np.array([]) if mat['flagWindowwipe'].size: flagWindowwipe = mat['flagWindowwipe'][0][:] else: flagWindowwipe = np.array([]) if mat['flagFlashlamps'].size: flagFlashlamps = mat['flagFlashlamps'][0][:] else: flagFlashlamps = np.array([]) if mat['flagPulsepower'].size: flagPulsepower = mat['flagPulsepower'][0][:] else: flagPulsepower = np.array([]) if mat['flagRestart'].size: flagRestart = mat['flagRestart'][0][:] else: flagRestart = np.array([]) if mat['flag_CH_NDChange'].size: flag_CH_NDChange = mat['flag_CH_NDChange'][:] else: flag_CH_NDChange = np.array([]) if mat['flagCH355FR'].size: flagCH355FR = mat['flagCH355FR'][0][:] else: flagCH355FR = np.array([]) if mat['flagCH532FR'].size: flagCH532FR = mat['flagCH532FR'][0][:] else: flagCH532FR = np.array([]) if mat['flagCH1064FR'].size: flagCH1064FR = mat['flagCH1064FR'][0][:] else: flagCH1064FR = np.array([]) if mat['flagCH387FR'].size: flagCH387FR = mat['flagCH387FR'][0][:] else: flagCH387FR = np.array([]) if mat['flagCH607FR'].size: flagCH607FR = mat['flagCH607FR'][0][:] else: flagCH607FR = np.array([]) if mat['flagCH407FR'].size: flagCH407FR = mat['flagCH407FR'][0][:] 
else: flagCH407FR = np.array([]) if mat['flagCH532FR_X'].size: flagCH532FR_X = mat['flagCH532FR_X'][0][:] else: flagCH532FR_X = np.array([]) else_time = mat['else_time'][:] else_label = mat['else_label'] if mat['WVCaliTime'].size: thisWVCaliTime = mat['WVCaliTime'][0][:] else: thisWVCaliTime = np.array([]) if mat['WVConst'].size: WVConst = mat['WVConst'][0][:] else: WVConst = np.array([]) if mat['depolCaliTime532'].size: thisDepolCaliTime532 = mat['depolCaliTime532'][0][:] else: thisDepolCaliTime532 = np.array([]) if mat['depolCaliConst532'].size: depolCaliConst532 = mat['depolCaliConst532'][0][:] else: depolCaliConst532 = np.array([]) if mat['yLim355'].size: yLim355 = mat['yLim355'][0][:] else: yLim355 = np.array([]) if mat['yLim532'].size: yLim532 = mat['yLim532'][0][:] else: yLim532 = np.array([]) if mat['yLim1064'].size: yLim1064 = mat['yLim1064'][0][:] else: yLim1064 = np.array([]) if mat['yLim_LC_ratio_355_387'].size: yLim_LC_ratio_355_387 = mat['yLim_LC_ratio_355_387'][0][:] else: yLim_LC_ratio_355_387 = np.array([]) if mat['yLim_LC_ratio_532_607'].size: yLim_LC_ratio_532_607 = mat['yLim_LC_ratio_532_607'][0][:] else: yLim_LC_ratio_532_607 = np.array([]) if mat['wvLim'].size: wvLim = mat['wvLim'][0][:] else: wvLim = np.array([]) if mat['depolConstLim532'].size: depolConstLim532 = mat['depolConstLim532'][0][:] else: depolConstLim532 = np.array([]) imgFormat = mat['imgFormat'][:][0] pollyVersion = mat['CampaignConfig']['name'][0][0][0] dataTime = mat['PollyDataInfo']['dataTime'][0][0][0] location = mat['CampaignConfig']['location'][0][0][0] startTime = mat['CampaignConfig']['startTime'][0][0][0] version = mat['PicassoConfig']['PicassoVersion'][0][0][0] fontname = mat['PicassoConfig']['fontname'][0][0][0] except Exception as e: print(e) print('Failed reading %s' % (tmpFile)) return # set the default font matplotlib.rcParams['font.sans-serif'] = fontname matplotlib.rcParams['font.family'] = "sans-serif" # convert matlab datenum tp datetime startTime = datenum_to_datetime(float(startTime[0])) dataTime = datenum_to_datetime(float(dataTime[0])) LCTime355 = [datenum_to_datetime(thisTime) for thisTime in thisLCTime355] LCTime532 = [datenum_to_datetime(thisTime) for thisTime in thisLCTime532] LCTime1064 = [datenum_to_datetime(thisTime) for thisTime in thisLCTime1064] LCTime387 = [datenum_to_datetime(thisTime) for thisTime in thisLCTime387] LCTime607 = [datenum_to_datetime(thisTime) for thisTime in thisLCTime607] logbookTime = [ datenum_to_datetime(thisTime) for thisTime in thisLogbookTime ] elseTime = [ datenum_to_datetime(thisElseTime) for thisElseTime in else_time ] WVCaliTime = [datenum_to_datetime(thisTime) for thisTime in thisWVCaliTime] depolCaliTime532 = [ datenum_to_datetime(thisTime) for thisTime in thisDepolCaliTime532 ] lineColor = { 'overlap': '#f48f42', 'windowwipe': '#ff66ff', 'flashlamps': '#993333', 'pulsepower': '#990099', 'restart': '#ffff00', 'NDChange': '#333300', 'else': '#00ff00' } # display lidar constants at 355mn fig, (ax1, ax2, ax3, ax4, ax5, ax6, ax7) = plt.subplots(7, figsize=(10, 15), sharex=True, gridspec_kw={ 'height_ratios': [1, 1, 1, 1, 1, 1, 1], 'hspace': 0.1 }) plt.subplots_adjust(top=0.96, bottom=0.05, left=0.07, right=0.98) # lidar constants at 355 nm LCTime355 = [ LCTime355[indx] for indx in np.arange(0, len(LCTime355)) if LC355Status[indx] == 2 ] p1 = ax1.scatter(LCTime355, LC355History[LC355Status == 2], s=7, c='#0000ff', marker='o', label='lidar constant') # default line for create legend l1 = ax1.axvline(x=0, linestyle='--', 
color=lineColor['overlap'], label='overlap') l2 = ax1.axvline(x=0, linestyle='--', color=lineColor['pulsepower'], label='pulsepower') l3 = ax1.axvline(x=0, linestyle='--', color=lineColor['windowwipe'], label='windowwipe') l4 = ax1.axvline(x=0, linestyle='--', color=lineColor['restart'], label='restart') l5 = ax1.axvline(x=0, linestyle='--', color=lineColor['flashlamps'], label='flashlamps') l6 = ax1.axvline(x=0, linestyle='--', color=lineColor['NDChange'], label='NDChange') l7 = ax1.axvline(x=0, linestyle='--', color=lineColor['else'], label=else_label[0]) ax1.legend(handles=[p1, l1, l2, l3, l4, l5, l6, l7], loc='upper left', fontsize=11) for iLogbookInfo in np.arange(0, len(logbookTime)): if flagOverlap[iLogbookInfo]: ax1.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['overlap']) if flagPulsepower[iLogbookInfo]: ax1.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['pulsepower']) if flagWindowwipe[iLogbookInfo]: ax1.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['windowwipe']) if flagRestart[iLogbookInfo]: ax1.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['restart']) if flagFlashlamps[iLogbookInfo]: ax1.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['flashlamps']) if flag_CH_NDChange[iLogbookInfo, flagCH355FR == 1]: ax1.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['NDChange']) for elseTime in else_time: ax1.axvline(x=elseTime, linestyle='--', color=lineColor['else']) ax1.set_ylabel('LC @ 355nm') ax1.grid(False) ax1.set_title('Lidar constants for {instrument} at {location}'.format( instrument=pollyVersion, location=location), fontsize=20) ax1.set_ylim(yLim355.tolist()) ax1.set_xlim([startTime - timedelta(days=2), dataTime + timedelta(days=2)]) # lidar constant at 532 nm LCTime532 = [ LCTime532[indx] for indx in np.arange(0, len(LCTime532)) if LC532Status[indx] == 2 ] p1 = ax2.scatter(LCTime532, LC532History[LC532Status == 2], s=7, c='#0000ff', marker='o') for iLogbookInfo in np.arange(0, len(logbookTime)): if flagOverlap[iLogbookInfo]: ax2.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['overlap']) if flagPulsepower[iLogbookInfo]: ax2.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['pulsepower']) if flagWindowwipe[iLogbookInfo]: ax2.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['windowwipe']) if flagRestart[iLogbookInfo]: ax2.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['restart']) if flagFlashlamps[iLogbookInfo]: ax2.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['flashlamps']) if flag_CH_NDChange[iLogbookInfo, flagCH532FR == 1]: ax2.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['NDChange']) for elseTime in else_time: ax2.axvline(x=elseTime, linestyle='--', color=lineColor['else']) ax2.set_ylabel('LC @ 532nm') ax2.grid(False) ax2.set_ylim(yLim532.tolist()) ax2.set_xlim([startTime - timedelta(days=2), dataTime + timedelta(days=2)]) # lidar constant at 1064 nm LCTime1064 = [ LCTime1064[indx] for indx in np.arange(0, len(LCTime1064)) if LC1064Status[indx] == 2 ] p1 = ax3.scatter(LCTime1064, LC1064History[LC1064Status == 2], s=7, c='#0000ff', marker='o') for iLogbookInfo in np.arange(0, len(logbookTime)): if flagOverlap[iLogbookInfo]: ax3.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['overlap']) if flagPulsepower[iLogbookInfo]: ax3.axvline(x=logbookTime[iLogbookInfo], linestyle='--', 
color=lineColor['pulsepower']) if flagWindowwipe[iLogbookInfo]: ax3.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['windowwipe']) if flagRestart[iLogbookInfo]: ax3.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['restart']) if flagFlashlamps[iLogbookInfo]: ax3.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['flashlamps']) if flag_CH_NDChange[iLogbookInfo, flagCH1064FR == 1]: ax3.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['NDChange']) for elseTime in else_time: ax3.axvline(x=elseTime, linestyle='--', color=lineColor['else']) ax3.set_ylabel('LC @ 1064nm') ax3.grid(False) ax3.set_ylim(yLim1064.tolist()) ax3.set_xlim([startTime - timedelta(days=2), dataTime + timedelta(days=2)]) # transmission ratio at 355/387 nm flagRamanLC = np.logical_and(LC355Status == 2, LC387Status == 2) LCTimeRaman = [ LCTime387[indx] for indx in np.arange(0, len(LCTime387)) if flagRamanLC[indx] ] p1 = ax4.scatter(LCTimeRaman, LC355History[flagRamanLC] / LC387History[flagRamanLC], s=7, c='#0000ff', marker='o') for iLogbookInfo in np.arange(0, len(logbookTime)): if flagOverlap[iLogbookInfo]: ax4.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['overlap']) if flagPulsepower[iLogbookInfo]: ax4.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['pulsepower']) if flagWindowwipe[iLogbookInfo]: ax4.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['windowwipe']) if flagRestart[iLogbookInfo]: ax4.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['restart']) if flagFlashlamps[iLogbookInfo]: ax4.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['flashlamps']) if flag_CH_NDChange[iLogbookInfo, flagCH355FR == 1] or \ flag_CH_NDChange[iLogbookInfo, flagCH387FR == 1]: ax4.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['NDChange']) for elseTime in else_time: ax4.axvline(x=elseTime, linestyle='--', color=lineColor['else']) ax4.set_ylabel('Ratio 355/387') ax4.grid(False) ax4.set_ylim(yLim_LC_ratio_355_387.tolist()) ax4.set_xlim([startTime - timedelta(days=2), dataTime + timedelta(days=2)]) # transmission ratio at 532/607 nm flagRamanLC = np.logical_and(LC532Status == 2, LC607Status == 2) LCTimeRaman = [ LCTime607[indx] for indx in np.arange(0, len(LCTime607)) if flagRamanLC[indx] ] p1 = ax5.scatter(LCTimeRaman, LC532History[flagRamanLC] / LC607History[flagRamanLC], s=7, c='#0000ff', marker='o') for iLogbookInfo in np.arange(0, len(logbookTime)): if flagOverlap[iLogbookInfo]: ax5.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['overlap']) if flagPulsepower[iLogbookInfo]: ax5.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['pulsepower']) if flagWindowwipe[iLogbookInfo]: ax5.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['windowwipe']) if flagRestart[iLogbookInfo]: ax5.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['restart']) if flagFlashlamps[iLogbookInfo]: ax5.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['flashlamps']) if flag_CH_NDChange[iLogbookInfo, flagCH532FR == 1] or \ flag_CH_NDChange[iLogbookInfo, flagCH607FR == 1]: ax5.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['NDChange']) for elseTime in else_time: ax5.axvline(x=elseTime, linestyle='--', color=lineColor['else']) ax5.set_ylabel('Ratio 532/607') ax5.grid(False) ax5.set_ylim(yLim_LC_ratio_532_607.tolist()) ax5.set_xlim([startTime - 
timedelta(days=2), dataTime + timedelta(days=2)]) # wv calibration constant p1 = ax6.scatter(WVCaliTime, WVConst, s=7, c='#0000ff', marker='o') for iLogbookInfo in np.arange(0, len(logbookTime)): if flagOverlap[iLogbookInfo]: ax6.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['overlap']) if flagPulsepower[iLogbookInfo]: ax6.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['pulsepower']) if flagWindowwipe[iLogbookInfo]: ax6.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['windowwipe']) if flagRestart[iLogbookInfo]: ax6.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['restart']) if flagFlashlamps[iLogbookInfo]: ax6.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['flashlamps']) if flag_CH_NDChange[iLogbookInfo, flagCH407FR == 1] or \ flag_CH_NDChange[iLogbookInfo, flagCH387FR == 1]: ax6.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['NDChange']) for elseTime in else_time: ax6.axvline(x=elseTime, linestyle='--', color=lineColor['else']) ax6.set_ylabel('WV const [g*kg^{-1}]') ax6.grid(False) ax6.set_ylim(wvLim.tolist()) ax6.set_xlim([startTime - timedelta(days=2), dataTime + timedelta(days=2)]) # depolarization calibration constant at 532 nm p1 = ax7.scatter(depolCaliTime532, depolCaliConst532, s=7, c='#0000ff', marker='o') for iLogbookInfo in np.arange(0, len(logbookTime)): if flagOverlap[iLogbookInfo]: ax7.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['overlap']) if flagPulsepower[iLogbookInfo]: ax7.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['pulsepower']) if flagWindowwipe[iLogbookInfo]: ax7.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['windowwipe']) if flagRestart[iLogbookInfo]: ax7.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['restart']) if flagFlashlamps[iLogbookInfo]: ax7.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['flashlamps']) if flag_CH_NDChange[iLogbookInfo, flagCH532FR == 1] or \ flag_CH_NDChange[iLogbookInfo, flagCH532FR_X == 1]: ax7.axvline(x=logbookTime[iLogbookInfo], linestyle='--', color=lineColor['NDChange']) for elseTime in else_time: ax7.axvline(x=elseTime, linestyle='--', color=lineColor['else']) ax7.set_ylabel('$\eta_{532}$') ax7.set_xlabel('Date (mm-dd)') ax7.set_ylim(depolConstLim532.tolist()) ax7.xaxis.set_major_formatter(DateFormatter('%m-%d')) ax7.grid(False) ax7.set_xlim([startTime - timedelta(days=2), dataTime + timedelta(days=2)]) # add watermark if flagWatermarkOn: rootDir = os.path.dirname( os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) im_license = matplotlib.image.imread( os.path.join(rootDir, 'img', 'by-sa.png')) newax_license = fig.add_axes([0.60, 0.01, 0.10, 0.02], zorder=10) newax_license.imshow(im_license, alpha=0.8, aspect='auto') newax_license.axis('off') fig.text(0.71, 0.01, 'Preliminary\nResults.', fontweight='bold', fontsize=12, color='red', ha='left', va='bottom', alpha=0.8, zorder=10) fig.text(0.84, 0.01, u"\u00A9 {1} {0}.\nCC BY SA 4.0 License.".format( datetime.now().strftime('%Y'), partnerLabel), fontweight='bold', fontsize=7, color='black', ha='left', va='bottom', alpha=1, zorder=10) fig.text(0.03, 0.03, startTime.strftime("%Y"), fontsize=12) fig.text(0.03, 0.02, 'Version: {version}'.format(version=version), fontsize=12) fig.savefig(os.path.join( saveFolder, '{pollyType}_{date}_long_term_cali_results.{imgFormat}'.format( pollyType=pollyVersion, date=dataTime.strftime('%Y%m%d'), 
imgFormat=imgFormat)), dpi=figDPI) plt.close()
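# --- A refactoring sketch (not part of the original script) ---
# The per-axis blocks above repeat the same logbook-marker logic seven times,
# differing only in the target axis and the channel mask used for the ND-filter
# check. A helper along these lines could replace them; all names used here
# (logbookTime, flagOverlap, flagPulsepower, flagWindowwipe, flagRestart,
# flagFlashlamps, flag_CH_NDChange, else_time, lineColor, np) are assumed to
# exist exactly as in the snippet above.
def add_logbook_lines(ax, channel_masks):
    """Draw dashed vertical markers for every logbook event on one axis.

    channel_masks is a list of boolean channel selectors (e.g. [flagCH355FR == 1]);
    an ND-filter-change marker is drawn when the change affects any of them.
    """
    event_flags = [
        (flagOverlap, 'overlap'),
        (flagPulsepower, 'pulsepower'),
        (flagWindowwipe, 'windowwipe'),
        (flagRestart, 'restart'),
        (flagFlashlamps, 'flashlamps'),
    ]
    for i in np.arange(0, len(logbookTime)):
        for flags, key in event_flags:
            if flags[i]:
                ax.axvline(x=logbookTime[i], linestyle='--', color=lineColor[key])
        if any(flag_CH_NDChange[i, mask].any() for mask in channel_masks):
            ax.axvline(x=logbookTime[i], linestyle='--',
                       color=lineColor['NDChange'])
    for elseTime in else_time:
        ax.axvline(x=elseTime, linestyle='--', color=lineColor['else'])

# e.g. the 355/387 ratio panel would then reduce to:
# add_logbook_lines(ax4, [flagCH355FR == 1, flagCH387FR == 1])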
import matplotlib.pyplot as plt from matplotlib.dates import DateFormatter, WeekdayLocator,\ DayLocator, MONDAY from matplotlib.finance import quotes_historical_yahoo_ohlc, candlestick_ohlc # (Year, month, day) tuples suffice as args for quotes_historical_yahoo date1 = (2004, 2, 1) date2 = (2004, 4, 12) mondays = WeekdayLocator(MONDAY) # major ticks on the mondays alldays = DayLocator() # minor ticks on the days weekFormatter = DateFormatter('%b %d') # e.g., Jan 12 dayFormatter = DateFormatter('%d') # e.g., 12 quotes = quotes_historical_yahoo_ohlc('INTC', date1, date2) if len(quotes) == 0: raise SystemExit fig, ax = plt.subplots() fig.subplots_adjust(bottom=0.2) ax.xaxis.set_major_locator(mondays) ax.xaxis.set_minor_locator(alldays) ax.xaxis.set_major_formatter(weekFormatter) #ax.xaxis.set_minor_formatter(dayFormatter) #plot_day_summary(ax, quotes, ticksize=3) candlestick_ohlc(ax, quotes, width=0.6) ax.xaxis_date() ax.autoscale_view() plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')
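# Note: matplotlib.finance was deprecated in matplotlib 2.0 and removed in 2.2,
# and the Yahoo quote helpers used above no longer work. A rough modern
# equivalent of this candlestick example, as a sketch assuming the third-party
# mplfinance and yfinance packages are installed:
import pandas as pd
import mplfinance as mpf
import yfinance as yf

df = yf.download('INTC', start='2004-02-01', end='2004-04-12')
if df.empty:
    raise SystemExit
# depending on the yfinance version, columns may come back as a MultiIndex
if isinstance(df.columns, pd.MultiIndex):
    df.columns = df.columns.get_level_values(0)
# mplfinance expects a DataFrame with a DatetimeIndex and Open/High/Low/Close
# (optionally Volume) columns, which yfinance provides.
mpf.plot(df, type='candle', style='yahoo', datetime_format='%b %d', xrotation=45)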
def doDotBase(title, y_label, x_label, datas, limit=None, label_pos=None, lines=None, ylines=None, dots=None): rcParams.update({ 'font.family': 'sans-serif', 'font.sans-serif': [u'SimHei'], 'axes.unicode_minus': False, 'font.size': 8, }) _show_date = False if _show_date: autodates = AutoDateLocator() yearsFmt = DateFormatter('%Y-%m-%d %H:%M:%S') fig = plt.figure(figsize=[10, 12], dpi=120) fig.autofmt_xdate() plt.xticks(rotation=45) ax = fig.add_subplot(111) ax_twiny = ax.twiny() ax_twiny.grid(False) ax_twiny.set_xticks([]) ax.xaxis.set_major_locator(autodates) # set the tick interval ax.xaxis.set_major_formatter(yearsFmt) # set the date display format ax.set_xticks( pd.date_range(start='2017-9-27 00:00:00', end='2018-03-31 23:59:59', freq='3D')) """set the displayed time range""" _day = datetime.date.today().day _month = datetime.date.today().month if _day < 27: _day += 3 else: _month += 1 _day = 1 _end_date = datetime.date.today().replace(day=_day, month=_month) ax.set_xlim("2017-9-1 00:00:00", "%s 00:00:00" % _end_date) else: plt.figure() for _data in datas: _color = _data[1] _dot = _data[2] _label = _data[3] plt.plot(range(len(_data[0])), _data[0], _dot, label=_label, color=_color) if limit is not None: ylim(limit[0], limit[1]) if dots is not None: for _dot in dots: plt.scatter(_dot[0], _dot[1], marker=_dot[2], color=_dot[3], label=_dot[4], alpha=0.5) if lines is not None: for _line in lines: plt.axvline(x=_line[0], linestyle=_line[1], linewidth=2, color=_line[2], label=_line[3]) if ylines is not None: for _line in ylines: plt.axhline(y=_line[0], linestyle=_line[1], linewidth=1, color=_line[2], label=_line[3]) plt.ylabel(y_label) plt.xlabel(x_label) plt.title(title) if label_pos is None: plt.legend() else: plt.legend(loc=label_pos) _fn = 'pic/%s-bar.png' % time.time() if not __test: plt.savefig(_fn, dpi=120) else: plt.show() return _fn
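# A minimal usage sketch for doDotBase (illustrative values only, not from the
# original script). From the loop bodies above: each `datas` entry is
# (y_values, color, line/marker style, label); each `dots` entry is
# (x, y, marker, color, label); each `lines`/`ylines` entry is
# (x_or_y, linestyle, color, label); `limit` gives the y-axis range.
# The function saves into a local 'pic/' directory (assumed to exist) unless
# the module-level __test flag is set, and returns the file name.
fn = doDotBase(
    title='burndown',
    y_label='hours',
    x_label='day',
    datas=[([8, 6, 5, 3, 0], 'b', 'o-', 'remaining')],
    limit=(0, 10),
    dots=[(3, 2, '^', 'm', 'review')],
    lines=[(2, '--', 'r', 'milestone')],
    ylines=[(4, ':', 'g', 'half-way')],
)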
print(yearlyR.head(n=2)) # Print total hours studied programming allTimeProgHours = progPomYear[0] + progPomYear[1] + progPomYear[2] print('\n\nTotal time spent studying programming: ', str(allTimeProgHours)) # Print total pom hours for 2016 totalYearPomHours = progPomYear[1] + otherPomYear[1] print('\n\nTotal Pom hours for 2016: ', str(totalYearPomHours)) # Print total pom hours for 2017 totalYearPomHours = progPomYear[2] + otherPomYear[2] print('\n\nTotal Pom hours for 2017: ', str(totalYearPomHours)) # Format the dates for plots DWformatter = DateFormatter('%-m-%d') Mformatter = DateFormatter('%b') Yformatter = DateFormatter('%Y') # Plot the daily data in a stacked bar plot mpl_fig2 = plt.figure() ax2 = mpl_fig2.add_subplot(111) p1 = ax2.bar(forDailyPlot.index, forDailyPlot[' Prog'], width=.75, label='Programming') p2 = ax2.bar(forDailyPlot.index, forDailyPlot[' Misc'], width=.75, label='Other', color=(1.0, 0.5, 0.62),
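# Portability note (editorial, not in the original): the '%-m' directive used
# for DWformatter above is a glibc extension and is rejected by strftime on
# Windows, where the equivalent is '%#m'. A hedged, platform-aware alternative:
import platform
day_fmt = '%#m-%d' if platform.system() == 'Windows' else '%-m-%d'
DWformatter = DateFormatter(day_fmt)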
def doIssueAction(issues, dots, figsize=[10, 12]): """ Create the "activity distribution" chart :param issues: tasks (used as y-axis labels) :param dots: activity points :param figsize: figure size :return: """ global __test _issue_point_marker = { 'timeoriginalestimate': 'v', 'timeestimate': 'o', 'timespent': '^', 'WorklogTimeSpent': 's', 'status': '+', u"bug产生人": '*', 'resolution': '>' } _issue_point_color = { 'timeoriginalestimate': 0, 'timeestimate': 1, 'timespent': 2, 'WorklogTimeSpent': 3, 'status': 4, u"bug产生人": 5, 'resolution': 6 } """ _issue_point_marker = {"agg_time": 'v', "org_time": 'o', "spent_time": '^', "status": 's', "updated": '+', "landmark": '>', "sprint": '<', "users": 'x', "epic_link": 'D', "lastViewed": 'p', } _issue_point_color = {"agg_time": 1, "org_time": 2, "spent_time": 3, "status": 4, "updated": 5, "landmark": 6, "sprint": 7, "users": 8, "epic_link": 9, "lastViewed": 10, } """ # _colors = plt.cm.BuPu(np.linspace(1, 255, len(_issue_point_marker)+1)); hsv; jet _colors = plt.cm.hsv(np.linspace(0.5, 1., len(_issue_point_marker))) """draw the figure""" rcParams.update({ 'font.family': 'sans-serif', 'font.sans-serif': [u'SimHei'], 'axes.unicode_minus': False, 'font.size': 6, }) autodates = AutoDateLocator() yearsFmt = DateFormatter('%Y-%m-%d') fig = figure(figsize=figsize, dpi=120) ax = fig.add_subplot(111) fig.autofmt_xdate() # configure the x-axis date appearance ax.xaxis.set_major_locator(autodates) # set the tick interval ax.xaxis.set_major_formatter(yearsFmt) # set the date display format """set the displayed time range""" _day = datetime.date.today().day _month = datetime.date.today().month if _day < 27: _day += 3 else: _month += 1 _day = 1 _end_date = datetime.date.today().replace(day=_day, month=_month) ax.set_xticks( pd.date_range(start='2018-02-01', end='%s' % _end_date, freq='3D')) ax.set_xlim("2018-02-01", "%s" % _end_date) ax.set_yticks(range(1, len(issues) + 1)) ax.set_yticklabels(issues, ) ax.set_ylim(0, len(issues) + 1) _leg = [] for __i in range(len(_issue_point_marker) + 1): _leg.append(None) for _dot in dots: if _dot[2] in _issue_point_marker: _marker = _issue_point_marker[_dot[2]] _index = _issue_point_color[_dot[2]] _c = _colors[_index] # _c = 'k' _leg[_index] = ax.scatter(_dot[0], _dot[1], color=_c, marker=_marker, s=30, alpha=0.7) ax.set_xlabel(u'日期', fontsize=11) ax.set_ylabel(u'任务', fontsize=11) ax.grid(True) ax.legend( _leg, [u"预估工时", u"设置工时", u"花费工时", u"记工时日志", u"状态修改", u"bug报告", u"解决问题"], loc=2, fontsize=12) plt.title(u'任务活动分布图', fontsize=12) plt.subplots_adjust(left=0.10, right=0.98, bottom=0.06, top=0.96) dt = datetime.datetime.now() _fn = 'pic/%s-issue-action.png' % dt.strftime('%Y%m%d%H%M%S%f') if not __test: plt.savefig(_fn, dpi=120) else: plt.show() return _fn
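# A minimal usage sketch for doIssueAction (illustrative values only, not from
# the original script). `issues` supplies the y-axis tick labels, plotted at
# y = 1..len(issues), and each `dots` entry is (event_datetime, issue_index,
# field_key), where field_key must be one of the _issue_point_marker keys above.
import datetime
fn = doIssueAction(
    issues=['PROJ-101', 'PROJ-102'],        # hypothetical issue keys
    dots=[
        (datetime.datetime(2018, 2, 5, 10, 30), 1, 'timeoriginalestimate'),
        (datetime.datetime(2018, 2, 7, 15, 0), 2, 'timespent'),
    ],
)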
def plot_station_data(site_data): '''Given a pandas dataframe containing all weather data for a specific station, this function saves a plot with the last 3 days worth of weather data for that station (or as much data as available if not yet 3-days). Parameters: site_data (dataframe): pandas dataframe containing all data, both directly observed and calculated, for a specific station Returns: None *saves plots to plot_dir listed near top of script* ''' latest = site_data.index[-1] site = site_data['station'][0] lower_site = site.lower() site_slice = site_data.loc[site_data.index >= ( latest - timedelta(hours=72))] #slice data to last 72hrs timestamp_end = site_slice.index[-1].strftime( '%Y%m%d%H%M') #timestamp end for saving .csv files dt = site_slice.index[:] #define dt for making subplots sitetitle = site_slice['name'][0] #define sitetitle for fig title graphtimestamp_start = dt[0].strftime( "%m/%d/%y") #start date, for fig title graphtimestamp = dt[-1].strftime("%m/%d/%y") #end date, for fig title markersize = 1.5 #markersize, for subplots linewidth = 1.0 #linewidth, for subplots fig = plt.figure() #create figure fig.set_size_inches(10, 10) #size figure if max(site_slice['snow_depth [cm]'] ) > 0: #six axes if there is snow depth ax1 = fig.add_subplot(6, 1, 1) ax2 = fig.add_subplot(6, 1, 2, sharex=ax1) ax3 = fig.add_subplot(6, 1, 3, sharex=ax1) ax4 = fig.add_subplot(6, 1, 4, sharex=ax1) ax5 = fig.add_subplot(6, 1, 5, sharex=ax1) ax6 = fig.add_subplot(6, 1, 6, sharex=ax1) ax6.set_xlabel('Time (UTC)') else: #five axes if no snow depth ax1 = fig.add_subplot(5, 1, 1) ax2 = fig.add_subplot(5, 1, 2, sharex=ax1) ax3 = fig.add_subplot(5, 1, 3, sharex=ax1) ax4 = fig.add_subplot(5, 1, 4, sharex=ax1) ax5 = fig.add_subplot(5, 1, 5, sharex=ax1) ax5.set_xlabel('Time (UTC)') ax1.set_title(site + ' ' + sitetitle + ', NY ' + graphtimestamp_start + ' - ' + graphtimestamp) #title figure #plot airT and dewT if 'temp_2m [degC]' in site_slice.keys(): airT = site_slice['temp_2m [degC]'] ax1.plot_date(dt, airT, 'o-', label="Temp", color="blue", linewidth=linewidth, markersize=markersize) if 'dew_point_temp_2m [degC]' in site_slice.keys(): Td = site_slice['dew_point_temp_2m [degC]'] ax1.plot_date(dt, Td, 'o-', label="Dew Point", color="black", linewidth=linewidth, markersize=markersize) if ax1.get_ylim()[0] < 0 < ax1.get_ylim()[1]: ax1.axhline(0, linestyle='-', linewidth=1.0, color='deepskyblue') trans = transforms.blended_transform_factory( ax1.get_yticklabels()[0].get_transform(), ax1.transData) ax1.text(0, 0, '0C', color="deepskyblue", transform=trans, ha="right", va="center") #light blue line at 0 degrees C ax1.set_ylabel('2m Temp ($^\circ$C)') ax1.legend(loc='best', ncol=2) axes = [ax1] #begin axes list #plot wind speed and gust if 'avg_wind_speed_merge [m/s]' in site_slice.keys(): wnd_spd = site_slice[ 'avg_wind_speed_merge [m/s]'] * 1.94384 #convert to knots ax2.plot_date(dt, wnd_spd, 'o-', label='Speed', color="forestgreen", linewidth=linewidth, markersize=markersize) if 'max_wind_speed_merge [m/s]' in site_slice.keys(): wnd_gst = site_slice[ 'max_wind_speed_merge [m/s]'] * 1.94384 #convert to knots max_wnd_gst = wnd_gst.max(skipna=True) ax2.plot_date(dt, wnd_gst, 'o-', label='Gust (Max ' + str(round(max_wnd_gst, 1)) + 'kt)', color="red", linewidth=linewidth, markersize=markersize) ax2.set_ylabel('Wind (kt)') ax2.legend(loc='best', ncol=2) axes.append(ax2) #plot wind direction if 'wind_direction_merge [degree]' in site_slice.keys(): wnd_dir = site_slice['wind_direction_merge [degree]'] ax3.plot_date(dt, 
wnd_dir, 'o-', label='Direction', color="purple", linewidth=0.2, markersize=markersize) ax3.set_ylim(-10, 370) ax3.set_ylabel('Wind Direction') ax3.set_yticks([0, 90, 180, 270, 360]) #locking y-ticks for wind direction axes.append(ax3) #plot MSLP (or station pressure, if MSLP unavailable) if 'mean_sea_level_pressure [mbar]' in site_slice.keys(): mslp = site_slice['mean_sea_level_pressure [mbar]'] min_mslp = mslp.min(skipna=True) #min 3-day mslp value max_mslp = mslp.max(skipna=True) #max 3-day mslp value labelname = 'Min ' + str(round(min_mslp, 2)) + 'hPa, Max ' + str( round(max_mslp, 2)) + 'hPa' ax4.plot_date(dt, mslp, 'o-', label=labelname, color='darkorange', linewidth=linewidth, markersize=markersize) ax4.set_ylabel('MSLP (hPa)') elif 'station_pressure [mbar]' in site_slice.keys(): sp = site_slice['station_pressure [mbar]'] min_sp = sp.min(skipna=True) #min 3-day station pressure value max_sp = sp.max(skipna=True) #max 3-day station pressure value labelname = 'Min ' + str(round(min_sp, 2)) + 'hPa, Max ' + str( round(max_sp, 2)) + 'hPa' ax4.plot_date(dt, sp, 'o-', label=labelname, color='darkorange', linewidth=linewidth, markersize=markersize) ax4.set_ylabel('STATION Pressure (hPa)') print('unable to get mslp, used station pressure instead') ax4.legend(loc='best') axes.append(ax4) #plot precip accum if 'precip_incremental [mm]' in site_slice.keys(): precip_inc = site_slice['precip_incremental [mm]'] precip_accum = 0.0 precip_accum_list = [] for increment in precip_inc: #calculate precip accumulation precip_accum = precip_accum + increment precip_accum_list.append(precip_accum) max_precip = max(precip_accum_list) labelname = 'Precip (' + str(round(max_precip, 2)) + 'mm)' ax5.plot_date(dt, precip_accum_list, 'o-', label=labelname, color='navy', linewidth=linewidth, markersize=markersize) if max_precip > 0: ax5.set_ylim(-0.1 * max_precip, max_precip + max_precip * 0.2) else: ax5.set_ylim(-0.5, 5) ax5.legend(loc='best') ax5.set_ylabel('Precip (mm)') axes.append(ax5) #plot snow depth if 'snow_depth [cm]' in site_slice.keys() and max( site_slice['snow_depth [cm]']) > 0: snow_depth = site_slice['snow_depth [cm]'] * 10 #convert to mm max_snow_depth = snow_depth.max(skipna=True) min_snow_depth = snow_depth.min(skipna=True) labelname = 'Min Depth ' + str(round( min_snow_depth, 2)) + 'mm, Max Depth ' + str( round(max_snow_depth, 2)) + 'mm' ax6.plot_date(dt, snow_depth, 'o-', label=labelname, color='deepskyblue', linewidth=linewidth, markersize=markersize) ax6.set_ylim(-0.1 * max_snow_depth, max_snow_depth + max_snow_depth * 0.2) if max_snow_depth > 0: ax5.set_ylim(-0.1 * max_snow_depth, max_snow_depth + max_snow_depth * 0.2) ax6.legend(loc='best') ax6.set_ylabel('Snow Depth (mm)') axes.append(ax6) for ax in axes: ax.spines["top"].set_visible(False) #remove dark borders on subplots ax.spines["right"].set_visible(False) ax.spines["left"].set_visible(False) ax.spines["bottom"].set_visible(False) ax.tick_params(axis='x', which='both', bottom=True, top=False) #add ticks at labeled times ax.tick_params(axis='y', which='both', left=True, right=False) ax.xaxis.set_major_locator(DayLocator()) #one date written per day ax.xaxis.set_major_formatter( DateFormatter('%b-%d')) #show date, written as 'Jul-12' ax.xaxis.set_minor_locator(HourLocator(np.linspace( 6, 18, 3))) #hour labels every 6 hours ax.xaxis.set_minor_formatter(DateFormatter('%H')) #show hour labels ax.fmt_xdata = DateFormatter('%Y%m%d%H%M%S') #fixes labels ax.yaxis.grid(linestyle='--') #adds y-axis grid lines ax.get_yaxis().set_label_coords(
-0.06, 0.5) #properly places y-labels away from figure #define dates in YYYYmmdd format (for saving and finding files) three_days_ago_date = (latest - timedelta(hours=72)).strftime('%Y%m%d') two_days_ago_date = (latest - timedelta(hours=48)).strftime('%Y%m%d') yesterday_date = (latest - timedelta(hours=24)).strftime('%Y%m%d') today_date = latest.strftime('%Y%m%d') plot_path = plot_dir + '/' + today_date if not os.path.exists(plot_path): os.mkdir(plot_path) plt.savefig(plot_path + '/ops.nys_ground.' + timestamp_end + '.' + lower_site + '.png', bbox_inches='tight') plt.close() print('plotted ' + site)
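# Hypothetical call sketch (editorial, not in the original script).
# plot_station_data expects a DataFrame indexed by observation time whose
# column names match the keys checked above; plot_dir must also point at an
# existing directory at module level. All values below are made up.
import numpy as np
import pandas as pd

idx = pd.date_range('2023-01-01', periods=72, freq='60min')
demo = pd.DataFrame({
    'station': 'VOOR',                        # hypothetical station code
    'name': 'Voorheesville',                  # hypothetical site name
    'temp_2m [degC]': np.linspace(-5.0, 3.0, len(idx)),
    'snow_depth [cm]': 0.0,                   # zero depth -> five-panel layout
}, index=idx)
plot_station_data(demo)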
def main(): if regenerate_data: created = {} seen_times = {} seen_ups = {} seen_scores = {} with open(InputFilename, 'r') as f: for line in f: parts = [x for x in line.strip().split("\t")] if len(parts) != 5: raise Exception(f"Unexpected data here: {line}") # this_time = int(parts[2][0:3]) this_time = datetime.datetime.strptime( parts[0].split(".")[0], "%Y-%m-%d %H:%M:%S") - datetime.timedelta(hours=7) this_id = parts[1] this_created = datetime.datetime.fromtimestamp(float(parts[2])) this_ups = int(parts[3]) this_score = int(parts[4]) if this_id not in created: created[this_id] = this_created assert (this_id not in seen_times) seen_times[this_id] = [] seen_ups[this_id] = [] seen_scores[this_id] = [] assert (created[this_id] == this_created) if this_time < this_created + datetime.timedelta(hours=4): seen_times[this_id].append(this_time) seen_ups[this_id].append(this_ups) seen_scores[this_id].append(this_score) with open(CreatedPickle, 'wb') as f: pickle.dump(created, f) with open(SeenTimesPickle, 'wb') as f: pickle.dump(seen_times, f) with open(SeenUpsPickle, 'wb') as f: pickle.dump(seen_ups, f) with open(SeenScoresPickle, 'wb') as f: pickle.dump(seen_scores, f) with open(CreatedPickle, 'rb') as f: created = pickle.load(f) with open(SeenTimesPickle, 'rb') as f: seen_times = pickle.load(f) with open(SeenUpsPickle, 'rb') as f: seen_ups = pickle.load(f) with open(SeenScoresPickle, 'rb') as f: seen_scores = pickle.load(f) print(f"Found {len(created)} unique submissions") ax = plt.subplot() # Only show submissions created since we started logging beginning_of_log = min([x[0] for x in seen_times.values() if len(x) > 0]) created = [x for x in created if (created[x] > beginning_of_log)] # Only show submissions that reached thresh # thresh = 50 # created = [x for x in created if len(seen_scores[x]) > 0 and max(seen_scores[x]) >= thresh] # Only show a random sample of submissions # proportion = 0.1 # created = [x for x in created if random.uniform(0, 1) < proportion] print(f"After filtering, {len(created)} will be displayed") for id in created: plt.plot_date(seen_times[id], seen_ups[id], xdate=True, markersize=1, marker=".", linestyle="solid") rule = rrulewrapper(DAILY, interval=1) loc = RRuleLocator(rule) ax.xaxis.set_major_locator(loc) formatter = DateFormatter('%m/%d') ax.xaxis.set_major_formatter(formatter) ax.set_ylim([0, 500]) plt.tight_layout() plt.show()
from pathlib import Path import epimargin.plots as plt import numpy as np import pandas as pd from epimargin.smoothing import convolution from matplotlib.dates import DateFormatter root = Path.home() / "Dropbox" / "Covid" results = root / "results" data = Path("./data") plt.set_theme("substack") formatter = DateFormatter("%b\n%Y") # fig 1 meta_ifrs = pd.read_stata(data / "meta_ifrs.dta") all_location_comparison = pd.read_stata(data / "all_location_comparison.dta") ## male levin_male = meta_ifrs[(meta_ifrs.location == "levin") & ( meta_ifrs.male == 1.0)].query('age <= 70').query('age >= 10') od_male = meta_ifrs[(meta_ifrs.location == "od") & ( meta_ifrs.male == 1.0)].query('age <= 70').query('age >= 10') cai_bihar_male = all_location_comparison[ (all_location_comparison.location == "Bihar") & (all_location_comparison.male == 1.0)].query('ifr > 0') cai_mumbai_male = all_location_comparison[ (all_location_comparison.location == "Mumbai") & (all_location_comparison.male == 1.0)].sort_values("age_bin_pooled")
infile = "/scratch/dknapp4/Western_Hawaii/Moorea/moorea_sample_coral_output_sr_20190716.csv" tdata = pd.read_csv(infile) header = list(tdata) header.pop(0) thedates = np.array(tdata['Date'], dtype='S8') data = np.asarray(tdata[header]) np.save("quick1.npy", data) np.save("quick2.npy", thedates) ## data = np.load("quick1.npy") ## thedates = np.load("quick2.npy") thedates = np.asarray( [date(int(day[0:4]), int(day[4:6]), int(day[6:8])) for day in thedates]) rule = rrulewrapper(MONTHLY, interval=1) loc = RRuleLocator(rule) formatter = DateFormatter('%m/%d/%y') with PdfPages('coral_change_moorea_sr_mean_rev20190716.pdf') as pdf: ## Page 1, Red fig = plt.figure(figsize=(8, 10)) ax = plt.subplot(2, 1, 1) ax.set_title('') good = np.not_equal(data[:, 0], -9.) tmean = np.mean(data[good, 0:8], axis=1) tsdev = np.std(data[good, 0:8], axis=1) plt.errorbar(thedates[good], tmean, yerr=tsdev, fmt='-bo', capsize=3) ax.xaxis.set_major_locator(loc) ax.xaxis.set_major_formatter(formatter) ax.xaxis.set_tick_params(rotation=30, labelsize=10) ## plt.plot_date(thedates[good], tmean, 'b')
from pylab import * from matplotlib.dates import DateFormatter, WeekdayLocator, HourLocator, DayLocator, MONDAY from matplotlib.finance import quotes_historical_yahoo, candlestick, plot_day_summary, candlestick2 from matplotlib.font_manager import FontProperties font = FontProperties(fname=r"c:\windows\fonts\simsun.ttc",size=18) # define the start date, end date and the stock ticker date1 = ( 2012, 12, 25 ) date2 = ( 2013, 6, 1 ) stock_num = '000002.sz' # define the date locators and formatters mondays = WeekdayLocator(MONDAY) alldays = DayLocator() weekFormatter = DateFormatter('%b %d') dayFormatter = DateFormatter('%d') # fetch the stock quotes quotes = quotes_historical_yahoo(stock_num, date1, date2) if len(quotes) == 0: raise SystemExit # draw a candlestick or OHLC bar chart fig = figure() fig.subplots_adjust(bottom=0.2) ax = fig.add_subplot(111) ax.xaxis.set_major_locator(mondays) ax.xaxis.set_minor_locator(alldays) ax.xaxis.set_major_formatter(weekFormatter) # comment out one of the two lines below to get either a candlestick chart or an OHLC bar chart