def pcolormesh(alt, r, t, title='default', shade='flat'):
    fig, ax = plt.subplots(figsize=(8, 2))
    im = ax.pcolormesh(t, alt, r, cmap='jet', shading=shade,
                       norm=mpl.colors.LogNorm(vmin=5e3, vmax=1e6))
    plt.colorbar(im, ax=ax)
    ax.set_ylim(0, 5)
    # Register the concise date converter so datetime x values get tidy tick labels
    converter = mdates.ConciseDateConverter()
    munits.registry[datetime.datetime] = converter
    fig.autofmt_xdate()
    if title == 'default':
        ax.set_title('pcolormesh curtain')
    else:
        try:
            ax.set_title(title)
        except Exception:
            print('Unsupported figure title')
    return
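# Hypothetical usage sketch (not from the original source) for the curtain plot
# above, assuming the module-level imports it relies on (matplotlib as mpl,
# matplotlib.pyplot as plt, matplotlib.dates as mdates, matplotlib.units as
# munits) and purely synthetic data; all names below are illustrative only.
import datetime
import numpy as np

times = np.array([datetime.datetime(2021, 6, 1) + datetime.timedelta(minutes=10 * k)
                  for k in range(144)])          # 24 h of 10-minute profiles
alts = np.linspace(0, 5, 120)                    # altitude grid in km
# One fewer row/column than the grid, as shading='flat' expects.
vals = 1e4 + 9e5 * np.random.default_rng(0).random((alts.size - 1, times.size - 1))

pcolormesh(alts, vals, times, title='example curtain')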
def gen_steam_cm_graph(self, graphdata: dict):
    """Make a graph for connection managers"""
    mpluse("Agg")
    formats = [
        "%y",     # ticks are mostly years
        "%b",     # ticks are mostly months
        "%d",     # ticks are mostly days
        "%H:%M",  # hrs
        "%H:%M",  # min
        "%S.%f",  # secs
    ]
    zero_formats = [""] + formats[:-1]
    zero_formats[3] = "%d-%b"
    offset_formats = [
        "",
        "%Y",
        "%b %Y",
        "%d %b %Y",
        "%d %b %Y",
        "%d %b %Y %H:%M",
    ]
    munits.registry[datetime] = mdates.ConciseDateConverter(
        formats=formats, zero_formats=zero_formats, offset_formats=offset_formats)

    cur = graphdata["start"]
    x = []
    for _ in range(len(graphdata["data"])):
        cur += graphdata["step"]
        x.append(cur)
    x = [datetime.utcfromtimestamp(_x / 1000) for _x in x]
    y = graphdata["data"]

    graphfile = BytesIO()
    with pyplot.style.context(
            path.join(bundled_data_path(self), "discord.mplstyle")):
        fig, ax = pyplot.subplots()
        ax.plot(x, y)
        ax.set_ylim(bottom=0)
        ax.grid()
        ax.set(xlabel="Date", ylabel="%", title="Steam Connection Managers")
        ax.set_yticks(np.arange(0, 100, 5))
        fig.savefig(graphfile)
        pyplot.close(fig)
    graphfile.seek(0)
    return graphfile
def formatDateTicks(f, ax):
    r"""
    Use some reasonable auto-formatting for time-based x ticks.

    Taken from here:
    https://matplotlib.org/3.1.0/gallery/ticks_and_spines/date_concise_formatter.html
    """
    formats = ['%y',       # ticks are mostly years
               '%b',       # ticks are mostly months
               '%d',       # ticks are mostly days
               '%H:%M',    # hrs
               '%H:%M',    # min
               '%S.%f', ]  # secs
    # these can be the same, except offset by one level....
    zero_formats = [''] + formats[:-1]
    # ...except for ticks that are mostly hours, then it's nice to have month-day
    zero_formats[3] = '%d-%b'
    offset_formats = ['',
                      '%Y',
                      '%b %Y',
                      '%d %b %Y',
                      '%d %b %Y',
                      '%d %b %Y %H:%M', ]
    converter = mdates.ConciseDateConverter(formats=formats,
                                            zero_formats=zero_formats,
                                            offset_formats=offset_formats)
    munits.registry[np.datetime64] = converter
    munits.registry[datetime.date] = converter
    munits.registry[datetime.datetime] = converter
    return f, ax
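# A minimal usage sketch for the helper above (made-up data; assumes this module
# imports matplotlib.pyplot as plt, matplotlib.dates as mdates,
# matplotlib.units as munits, numpy as np and datetime, as the function body requires).
import numpy as np
import matplotlib.pyplot as plt

times = np.arange('2021-01-01', '2021-01-03', dtype='datetime64[h]')   # two days, hourly
values = np.random.default_rng(0).standard_normal(times.size).cumsum()

fig, ax = plt.subplots()
fig, ax = formatDateTicks(fig, ax)   # registers the ConciseDateConverter globally
ax.plot(times, values)
plt.show()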
import numpy as np
import pandas as pd
from datetime import date, datetime
import matplotlib.units as munits
import matplotlib.dates as mdates
import matplotlib.pyplot as plt

converter = mdates.ConciseDateConverter()
munits.registry[np.datetime64] = converter
munits.registry[date] = converter
munits.registry[datetime] = converter


def seven_day_average(data):
    # Rolling 7-day mean via the cumulative-sum trick
    n = 7
    ret = np.cumsum(data, dtype=float)
    ret[n:] = ret[n:] - ret[:-n]
    return ret / n


def first_and_second_by_state(state):
    df = pd.read_html(
        f"https://covidlive.com.au/report/daily-vaccinations-first-doses/{state.lower()}"
    )[1]
    first = np.array(df['FIRST'][::-1])
    first_dates = np.array(
        [np.datetime64(datetime.strptime(d, '%d %b %y'), 'D') for d in df['DATE'][::-1]]
    )
    df = pd.read_html(
from matplotlib import pyplot as plt
from matplotlib import dates
from matplotlib import units
import pandas as pd
import numpy as np
import datetime
import os

FORMATS = ['%Y', '%b', '%d', '%H:%M', '%H:%M', '%S.%f']
ZERO_FORMATS = ['', '%b\n%Y', '%d %b', '%d %b', '%H:%M', '%H:%M']
OFFSET_FORMATS = ['', '%Y', '%b %Y', '%d %b %Y', '%d %b %Y', '%d %b %Y %H:%M']

converter = dates.ConciseDateConverter(formats=FORMATS,
                                       zero_formats=ZERO_FORMATS,
                                       offset_formats=OFFSET_FORMATS)
units.registry[np.datetime64] = converter
units.registry[datetime.date] = converter
units.registry[datetime.datetime] = converter

SMALL_SIZE = 12
MEDIUM_SIZE = 14
plt.rc('font', size=SMALL_SIZE)
plt.rc('axes', labelsize=MEDIUM_SIZE)
plt.rc('xtick', labelsize=SMALL_SIZE)
plt.rc('ytick', labelsize=SMALL_SIZE)
plt.rc('legend', fontsize=MEDIUM_SIZE)
plt.rc('figure', autolayout=True)
plt.rc('axes', grid=True)
plt.rc('legend', loc='upper left', borderaxespad=0)
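# With the converter registered in the units registry as above, any axis that is
# fed np.datetime64, datetime.date or datetime.datetime values picks up the
# concise tick formats automatically. A small sketch with made-up data:
dates_x = np.arange('2021-01-01', '2021-04-01', dtype='datetime64[D]')   # three months, daily
values = np.cumsum(np.random.default_rng(1).standard_normal(dates_x.size))

fig, ax = plt.subplots()
ax.plot(dates_x, values, label='example series')
ax.legend()
plt.show()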
ax.xaxis.set_major_locator(locator)
ax.xaxis.set_major_formatter(formatter)
ax.plot(dates, y)
ax.set_xlim(lims[nn])
axs[0].set_title('Concise Date Formatter')

plt.show()

#############################################################################
# If all calls to axes that have dates are to be made using this converter,
# it is probably most convenient to use the units registry where you do
# imports:

import matplotlib.units as munits

converter = mdates.ConciseDateConverter()
munits.registry[np.datetime64] = converter
munits.registry[datetime.date] = converter
munits.registry[datetime.datetime] = converter

fig, axs = plt.subplots(3, 1, figsize=(6, 6), constrained_layout=True)
for nn, ax in enumerate(axs):
    ax.plot(dates, y)
    ax.set_xlim(lims[nn])
axs[0].set_title('Concise Date Formatter')

plt.show()

#############################################################################
# Localization of date formats
# ============================
    [0.87422, 0.24526, 0.03297], [0.86760, 0.23730, 0.03082], [0.86079, 0.22945, 0.02875], [0.85380, 0.22170, 0.02677],
    [0.84662, 0.21407, 0.02487], [0.83926, 0.20654, 0.02305], [0.83172, 0.19912, 0.02131], [0.82399, 0.19182, 0.01966],
    [0.81608, 0.18462, 0.01809], [0.80799, 0.17753, 0.01660], [0.79971, 0.17055, 0.01520], [0.79125, 0.16368, 0.01387],
    [0.78260, 0.15693, 0.01264], [0.77377, 0.15028, 0.01148], [0.76476, 0.14374, 0.01041], [0.75556, 0.13731, 0.00942],
    [0.74617, 0.13098, 0.00851], [0.73661, 0.12477, 0.00769], [0.72686, 0.11867, 0.00695], [0.71692, 0.11268, 0.00629],
    [0.70680, 0.10680, 0.00571], [0.69650, 0.10102, 0.00522], [0.68602, 0.09536, 0.00481], [0.67535, 0.08980, 0.00449],
    [0.66449, 0.08436, 0.00424], [0.65345, 0.07902, 0.00408], [0.64223, 0.07380, 0.00401], [0.63082, 0.06868, 0.00401],
    [0.61923, 0.06367, 0.00410], [0.60746, 0.05878, 0.00427], [0.59550, 0.05399, 0.00453], [0.58336, 0.04931, 0.00486],
    [0.57103, 0.04474, 0.00529], [0.55852, 0.04028, 0.00579], [0.54583, 0.03593, 0.00638], [0.53295, 0.03169, 0.00705],
    [0.51989, 0.02756, 0.00780], [0.50664, 0.02354, 0.00863], [0.49321, 0.01963, 0.00955], [0.47960, 0.01583, 0.01055]
]

cmap = mcolors.ListedColormap(turbo_colormap_data)
matplotlib.cm.register_cmap(name='turbo', cmap=cmap)
cmap = mcolors.ListedColormap(turbo_colormap_data[::-1])
matplotlib.cm.register_cmap(name='turbo_r', cmap=cmap)

munits.registry[np.datetime64] = mdates.ConciseDateConverter()
munits.registry[datetime.date] = mdates.ConciseDateConverter()
munits.registry[datetime.datetime] = mdates.ConciseDateConverter()
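# Note: matplotlib.cm.register_cmap is deprecated in recent Matplotlib releases,
# and 'turbo' now ships as a built-in colormap, so the registration above is only
# needed on older versions. On Matplotlib >= 3.6 a custom map could instead be
# registered roughly like this (a sketch; 'my_turbo' is an arbitrary name chosen
# here to avoid clashing with the built-in 'turbo'):
import matplotlib
import matplotlib.colors as mcolors

my_cmap = mcolors.ListedColormap(turbo_colormap_data, name='my_turbo')
matplotlib.colormaps.register(my_cmap)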
def Show_Graph(self):
    ##################################
    converter = mdates.ConciseDateConverter()
    munits.registry[datetime.date] = converter
    munits.registry[datetime.datetime] = converter
    ##################################
    EndDate = date.today()
    d = timedelta(days=6)
    date1 = EndDate - d
    d = timedelta(days=1)
    ##################################
    # MatPlotLib Bar Chart Code
    YMin = 0
    YMax = 0
    Y_calories = []
    X_days = []
    ##################################
    for i in range(7):
        Y = i * 200
        Y_calories.append(Y)
        X_days.append(date1)
        date1 = date1 + d
        if (Y > YMax):
            YMax = Y
    ##################################
    plt.clf()
    plt.cla()
    plt.ylim(YMin, YMax)
    width1 = float(0.5)
    plt.bar(x=X_days, height=Y_calories, width=width1, label='eaten', zorder=2)
    strNum = 'Past ' + str(7) + ' Days'
    ##################################
    # Show the major grid lines
    plt.grid(b=True, which='major', color='#444444', linestyle='-', alpha=0.5, zorder=1)
    ##################################
    # Show the minor grid lines
    plt.minorticks_on()
    ##################################
    str1 = 'Calories Eaten each Day '
    plt.title(str1)
    plt.xlabel(strNum)
    plt.ylabel('Calories per Day')
    plt.gcf()
    plt.savefig(fname='./Pic.png', format='png', transparent=True)
    ##################################
    Y_calories.clear()
    X_days.clear()
    ##################################
    self.iPic.reload()
    if (self.iPic.parent is None):
        self.add_widget(self.iPic)
    ##################################
    return
def create_emd_from_lightcurve(self, label, figsize=(20, 28)):
    from matplotlib import gridspec as gs
    from PyEMD import EMD
    import pandas as pd
    import numpy as np
    import matplotlib.pyplot as plt
    from pandas.plotting import register_matplotlib_converters
    register_matplotlib_converters()
    import glob
    from datetime import timedelta
    from matplotlib import rc
    from multiprocessing import Process

    emd = EMD()
    font = {"family": "DejaVu", "weight": "normal", "size": 16}
    rc("font", **font)

    numpyrow, numpycol = self.translate_number(int(label), self.nrowcol)
    picture_folder_id = f"{numpycol:02d}_{numpyrow:02d}"
    path_to_lcurves = self.global_save_path
    path_to_picture = f"{self.global_save_path}{picture_folder_id}/"

    dataset = pd.read_csv(f"{path_to_lcurves}complete_lcurves.csv")
    time = pd.to_datetime(dataset["Time"])
    region = dataset[f"{label}"]
    imfs = emd(region.values)

    image_list = sorted(glob.glob(f"{path_to_picture}*.png"))
    vector_index_div = np.array_split(np.array(np.arange(len(image_list))), self.n_cpus)
    proc = []

    colors = ["black", "blue", "red"]
    fig = plt.figure(figsize=(10, 20))
    time_curr = time[0]
    lineset = {}
    min_height = 0
    max_height = 0
    for index, i in enumerate(imfs):
        if index < len(imfs) - 1:
            if i.min() < min_height:
                min_height = i.min()
            if i.max() > max_height:
                max_height = i.max()

    # Draw all IMFs
    for index, i in enumerate(imfs):
        if index != len(imfs) - 1:
            fig.add_subplot(len(imfs), 1, len(imfs) - 1 - index)
            ax = plt.gca()
            plt.plot(time, i, color=colors[0], linewidth=4, label=f"IMF {index}")
            ybot, ytop = ax.get_ylim()
            delta_y = ytop - ybot
            move_by = (max_height - min_height) / 2
            ax.set_ylim(ybot - move_by, ytop + move_by)
            ax.yaxis.set_label_position("right")
            ax.set_ylim(min_height, max_height)
            ax.set(ylabel=f"IMF {index + 1}", xlabel="", xticks=[])
            lineset[f"{index}"] = ax.axvline(x=time_curr, color="black", alpha=0.4, linewidth=5)

    # Draw original signal + last IMF (residual)
    fig.add_subplot(len(imfs), 1, len(imfs))
    ax1 = plt.gca()
    plt.plot(time, region, color=colors[1], linewidth=5, label="Signal")
    plt.plot(
        time,
        imfs[len(imfs) - 1],
        color=colors[2],
        linestyle="dashed",
        label="Residual",
        linewidth=4,
    )
    lineset["signal"] = ax1.axvline(x=time_curr, color="black", alpha=0.4, linewidth=5)
    ax1.set(ylabel="Original Signal")

    import matplotlib.dates as mdates
    import matplotlib.units as munits
    import datetime

    converter = mdates.ConciseDateConverter()
    munits.registry[np.datetime64] = converter
    munits.registry[datetime.date] = converter
    munits.registry[datetime.datetime] = converter

    plt.gcf().autofmt_xdate()
    plt.legend()
    ax1.yaxis.set_label_position("right")

    save_emd = f"{path_to_picture}/emd_results/"
    os.makedirs(save_emd, exist_ok=True)

    def multi_proc(index_sublist, save_path=save_emd):
        for curr_obs in index_sublist:
            # Draw all of the lines
            for k in list(lineset):
                lineset[k].set_xdata(time[curr_obs])
            fig.canvas.draw()
            plt.savefig(f"{save_path}{curr_obs:03d}")

    for list_indices_proc in vector_index_div:
        if list_indices_proc.size:
            # noinspection PyTypeChecker
            pr = Process(target=multi_proc, args=([list_indices_proc]))
            proc.append(pr)
            pr.start()

    for process in proc:
        process.join()

    # Then combine all of these
    from Scripts.Imports.Data_analysis.Tools import imgrecomb_any
    imgrecomb_any(
        path_1=f"{path_to_picture}",
        path_2=f"{save_emd}",
        save_path=f"{path_to_picture}with_EMD/",
        figsize=figsize,
    )
def plot_time(dataset, time_col, clust_col="prediction", k=None, save_path=None):
    """
    Plot the trend of error messages over time (per each cluster).

    -- params:
        dataset (pyspark.sql.dataframe.DataFrame): data frame with predictions and message times
        time_col (string): name of the unix time in milliseconds
        clust_col (string): name of the cluster prediction column
        k (int): number of clusters. If specified the executing time decreases. Default None
        save_path (string): where to save output figures. Default None (no saving)

    Returns: None
    """
    import pyspark.sql.functions as F
    import os
    import datetime
    from pathlib import Path
    from matplotlib import pyplot as plt
    import matplotlib.dates as mdates
    import matplotlib.units as munits

    dataset = (
        dataset.filter(F.col(time_col) > 0)  # ignore null values
        .withColumn("datetime_str", F.from_unixtime(F.col(time_col) / 1000))  # datetime (string)
        .withColumn("datetime", F.to_timestamp(F.col('datetime_str'), 'yyyy-MM-dd HH:mm'))  # datetime (numeric)
        .select(clust_col, "datetime"))

    if k:
        clust_ids = [{clust_col: i} for i in range(0, k)]
    else:
        clust_ids = dataset.select(clust_col).distinct().collect()

    if save_path:
        print("Saving time plots to: {}".format(save_path))

    for clust_id in clust_ids:
        cluster = dataset.filter(
            F.col(clust_col) == clust_id[clust_col]).select("datetime")
        # cluster = cluster.groupBy("datetime").agg(F.count("datetime").alias("freq")).orderBy("datetime", ascending=True)
        cluster = cluster.toPandas()

        try:
            res_sort = cluster.datetime.value_counts(bins=24 * 6).sort_index()
        except ValueError:
            print("""WARNING: time column completely empty.
                  Errors time trend cannot be displayed for cluster {}""".format(clust_id[clust_col]))
            continue

        x_datetime = [interval.right for interval in res_sort.index]

        converter = mdates.ConciseDateConverter()
        munits.registry[datetime.datetime] = converter

        fig, ax = plt.subplots(figsize=(10, 5))
        # ax.plot(res_sort.index, res_sort)
        ax.plot(x_datetime, res_sort.values)

        min_h = min(x_datetime)  # res_sort.index.min()
        max_h = max(x_datetime)  # res_sort.index.max()
        day_min = str(min_h)[:10]
        day_max = str(max_h)[:10]
        # title = f"{'Cluster {} - init:'.format(3):>25}{day_min:>15}{str(min_h)[11:]:>12}" + \
        #         f"\n{' - end:':>25}{day_max:>15}{str(max_h)[11:]:>12}"
        title = "Cluster {} - day: {}".format(clust_id[clust_col], day_min)
        plt.title(title)

        if save_path:
            save_path = Path(save_path)
            save_path.mkdir(parents=True, exist_ok=True)
            outname = save_path / "cluster_{}.png".format(clust_id[clust_col])
            # print("Saving time plots to: {}".format(outname))
            if os.path.isfile(outname):
                os.remove(outname)
            fig.savefig(outname, format='png', bbox_inches='tight')
        else:
            plt.show()
def concise_date_all():
    import datetime
    import matplotlib.units as munits
    converter = mdates.ConciseDateConverter()
    # Register the converter for the common date types
    munits.registry[np.datetime64] = converter
    munits.registry[datetime.date] = converter
    munits.registry[datetime.datetime] = converter
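# Usage sketch for a helper like this, assuming mdates and np are imported at
# module level as the body implies: call it once, and every date axis created
# afterwards in the process gets the concise formatting.
import numpy as np
import matplotlib.pyplot as plt

concise_date_all()   # one-time registration

days = np.arange('2022-03-01', '2022-03-15', dtype='datetime64[D]')
plt.plot(days, np.arange(days.size))
plt.show()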
def plotting(data, lim, title=None, sav_path=None, yrange=[5, 0.1],
             cscale=[10**3.5, 10**8.5], save=1):
    files = list(data.keys())
    if type(sav_path) is not str:
        sav_path = r'C:\Users\Magnolia\OneDrive - UMBC\Research\Figures\Preliminary'

    i = 0
    t = []
    alt = []
    r = []
    alt1 = []
    alt2 = []
    for file in files:
        uL_t, uL_alt, uL_r, uL_alt1, uL_alt2 = lidar.variables(data, file)
        if i == 0:
            t, alt, r, alt1, alt2 = lidar.variables(data, file)
        i += 1
        if i > 1:
            t = np.append(t, uL_t)
            alt = np.append(alt, uL_alt)
            r = np.hstack((r, uL_r))
            alt1 = np.append(alt1, uL_alt1)
            alt2 = np.append(alt2, uL_alt2)

    # Date Axis Handling #
    converter = mdates.ConciseDateConverter()
    munits.registry[datetime.datetime] = converter

    if type(title) is not str:
        title = 'blank'

    fig, ax = plt.subplots(figsize=(12, 3), constrained_layout=True)
    im = plt.imshow(r, extent=[t[0], t[-1], np.mean(alt2), np.mean(alt1)],
                    cmap='jet', aspect='auto', norm=LogNorm())
    cbar = plt.colorbar(extend='both')
    im.set_clim(vmin=cscale[0], vmax=cscale[1])
    cbar.set_label('Aerosol Backscatter')
    ax.set_xlabel('Datetime (UTC)')
    ax.set_ylabel('Altitude (m)')
    ax.set_ylim(yrange)
    ax.set_title(f'{title}')
    plt.gca().invert_yaxis()  # Flip the image so it's not upside down
    ax.xaxis_date()
    lims = [np.datetime64(lim[0]), np.datetime64(lim[1])]
    ax.set_xlim(lims)
    fig.autofmt_xdate()

    characters_to_remove = "!()@:"
    pattern = "[" + characters_to_remove + "]"
    new_string = re.sub(pattern, "", title)
    if save == 1:
        plt.savefig(f"{sav_path}\\{new_string}.png", dpi=600)
    plt.show()

    output = {'t': t, 'alt': alt, 'r': r, 'alt1': alt1, 'alt2': alt2}
    return output
def plotting(files, lim, title=None, sav_path=None, yrange=[5, 0.1]):
    def variables(file):
        t = data[file].time
        alt = data[file].range
        # Gathering the backscatter (i.e. beta_raw)
        r = data[file].beta_raw
        # Flip the image
        r = np.array(r).T
        # Filter for negative values
        r[r < 0] = np.nan
        np.warnings.filterwarnings('ignore')
        ''' Merge Days (couple days) '''
        alt1 = np.min(alt) / 1000
        alt2 = np.max(alt) / 1000
        x_lims = mdates.date2num(t)
        return x_lims, alt, r, alt1, alt2

    if type(sav_path) is not str:
        sav_path = r'C:\Users\Magnolia\OneDrive - UMBC\Research\Figures\Preliminary'

    i = 0
    t = []
    alt = []
    r = []
    alt1 = []
    alt2 = []
    for file in files:
        uL_t, uL_alt, uL_r, uL_alt1, uL_alt2 = variables(file)
        if i == 0:
            t, alt, r, alt1, alt2 = variables(file)
        i += 1
        if i > 1:
            t = np.append(t, uL_t)
            alt = np.append(alt, uL_alt)
            r = np.hstack((r, uL_r))
            alt1 = np.append(alt1, uL_alt1)
            alt2 = np.append(alt2, uL_alt2)

    # Date Axis Handling #
    converter = mdates.ConciseDateConverter()
    munits.registry[datetime.datetime] = converter

    if type(title) is not str:
        title = file

    fig, ax = plt.subplots(figsize=(12, 3), constrained_layout=True)
    im = plt.imshow(r, extent=[t[0], t[-1], np.mean(alt2), np.mean(alt1)],
                    cmap='jet', aspect='auto', norm=LogNorm())
    cbar = plt.colorbar(extend='both')
    im.set_clim(vmin=10**3.5, vmax=10**8.5)
    cbar.set_label('Aerosol Backscatter')
    ax.set_xlabel('Datetime (UTC)')
    ax.set_ylabel('Altitude (m)')
    ax.set_ylim(yrange)
    ax.set_title(f'{title}')
    plt.gca().invert_yaxis()
    ax.xaxis_date()
    lims = [np.datetime64(lim[0]), np.datetime64(lim[1])]
    ax.set_xlim(lims)
    fig.autofmt_xdate()

    characters_to_remove = "!()@:"
    pattern = "[" + characters_to_remove + "]"
    new_string = re.sub(pattern, "", title)
    plt.savefig(f"{sav_path}\\{new_string}.png", dpi=600)
    plt.show()

    data1 = {'t': t, 'alt': alt, 'r': r, 'alt1': alt1, 'alt2': alt2}
    return data1