def register_converter_cb(key) -> None:
    """Config callback: sync pandas' matplotlib converter registration.

    Registers pandas' matplotlib unit converters when the config option
    named by *key* is truthy, and deregisters them otherwise.

    :param key: Name of the config option read via ``cf.get_option``.
    """
    # Imported lazily so importing this module never pulls in matplotlib.
    from pandas.plotting import (
        deregister_matplotlib_converters,
        register_matplotlib_converters,
    )

    if cf.get_option(key):
        register_matplotlib_converters()
    else:
        deregister_matplotlib_converters()
def register_converter_cb(key):
    """Enable or disable pandas' matplotlib converters per the option *key*."""
    from pandas.plotting import register_matplotlib_converters
    from pandas.plotting import deregister_matplotlib_converters

    # Pick the matching action for the current option value and run it.
    toggle = (
        register_matplotlib_converters
        if cf.get_option(key)
        else deregister_matplotlib_converters
    )
    toggle()
def register_converter_cb(key) -> None:
    """Keep matplotlib converter registration in step with the option *key*."""
    from pandas.plotting import (
        deregister_matplotlib_converters,
        register_matplotlib_converters,
    )

    # Deregister on a falsy option value, register otherwise.
    if not cf.get_option(key):
        deregister_matplotlib_converters()
    else:
        register_matplotlib_converters()
def _plot_network_relative_to_ref_station(df_plot, ref, target_stns, batch_options, filter_options, display_options):
    """
    Compute relative residuals and send to plotting function.

    :param df_plot: Pandas dataframe containing only events which are common
        to ref station and target stations.
    :type df_plot: pandas.DataFrame
    :param ref: Network and station codes for reference network
        (expected to be just one entry)
    :type ref: dict of corresponding network and station codes under keys
        'net' and 'sta' (expected to be just one entry)
    :param target_stns: Network and station codes for target network
    :type target_stns: dict of corresponding network and station codes under
        keys 'net' and 'sta'
    :param batch_options: Runtime options.
    :type batch_options: class BatchOptions
    :param filter_options: Filter options.
    :type filter_options: class FilterOptions
    :param display_options: Display options.
    :type display_options: class DisplayOptions
    """
    register_matplotlib_converters()
    # Residual relative to the reference station for each pick.
    df_plot = df_plot.assign(relTtResidual=(df_plot['ttResidual'] - df_plot['ttResidualRef']))
    # Re-order columns
    df_plot = df_plot[[
        '#eventID', 'originTimestamp', 'mag', 'originLon', 'originLat', 'originDepthKm',
        'net', 'sta', 'cha', 'pickTimestamp', 'phase', 'stationLon', 'stationLat',
        'distance', 'snr', 'ttResidual', 'ttResidualRef', 'relTtResidual',
        'qualityMeasureCWT', 'qualityMeasureSlope', 'nSigma'
    ]]
    # Sort data by event origin time
    df_plot = df_plot.sort_values(['#eventID', 'originTimestamp'])

    def _plot_decorator(opts):
        # Overlay optional event markers and deployment intervals on the plot.
        if opts.events is not None:
            _add_event_marker_lines(opts.events)
        if opts.deployments is not None:
            _add_temporary_deployment_intervals(opts.deployments)

    _plot_target_network_rel_residuals(
        df_plot, target_stns, ref, batch_options, filter_options,
        annotator=lambda: _plot_decorator(display_options))
    deregister_matplotlib_converters()
def test_registry_resets(self):
    """register/deregister_matplotlib_converters must leave matplotlib's unit
    registry exactly as deregistration found it."""
    units = pytest.importorskip("matplotlib.units")
    dates = pytest.importorskip("matplotlib.dates")

    # make a copy, to reset to
    original = dict(units.registry)

    try:
        # get to a known state
        units.registry.clear()
        date_converter = dates.DateConverter()
        units.registry[datetime] = date_converter
        units.registry[date] = date_converter

        # registering must replace our converter...
        register_matplotlib_converters()
        assert units.registry[date] is not date_converter

        # ...and deregistering must put it back.
        deregister_matplotlib_converters()
        assert units.registry[date] is date_converter
    finally:
        # restore original state
        units.registry.clear()
        for k, v in original.items():
            units.registry[k] = v
def test_registry_resets(self):
    """Deregistering pandas' converters must restore whatever was registered
    before pandas registered its own."""
    units = pytest.importorskip("matplotlib.units")
    dates = pytest.importorskip("matplotlib.dates")

    # Snapshot the registry so the test leaves no trace.
    snapshot = dict(units.registry)
    try:
        # Start from a known registry containing only our converter.
        units.registry.clear()
        converter = dates.DateConverter()
        units.registry[datetime] = converter
        units.registry[date] = converter

        register_matplotlib_converters()
        assert units.registry[date] is not converter

        deregister_matplotlib_converters()
        assert units.registry[date] is converter
    finally:
        # Put the registry back exactly as we found it.
        units.registry.clear()
        units.registry.update(snapshot)
from fbprophet.diagnostics import performance_metrics logger = logging.getLogger('fbprophet') try: from matplotlib import pyplot as plt from matplotlib.dates import ( MonthLocator, num2date, AutoDateLocator, AutoDateFormatter, ) from matplotlib.ticker import FuncFormatter from pandas.plotting import deregister_matplotlib_converters deregister_matplotlib_converters() except ImportError: logger.error('Importing matplotlib failed. Plotting will not work.') try: import plotly.graph_objs as go except ImportError: logger.error('Importing plotly failed. Interactive plots will not work.') def plot(m, fcst, ax=None, uncertainty=True, plot_cap=True, xlabel='ds',
def manage_matplotlib_context():
    """Return a context manager for temporarily changing matplotlib unit
    registries and rcParams.

    On entry: registers pandas' matplotlib converters, applies a
    seaborn-like rcParams style, and sets the seaborn "white" style.
    On exit: deregisters the converters and restores the original rcParams.
    """
    saved_rc = matplotlib.rcParams.copy()

    # Style credits go to the ggplot and seaborn packages; the values were
    # copied here to avoid depending on seaborn's style files directly.
    style_rc = {
        "patch.facecolor": "#348ABD",  # blue
        "patch.antialiased": True,
        "font.size": 10.0,
        "figure.edgecolor": "0.50",
        # Seaborn common parameters
        "figure.facecolor": "white",
        "text.color": ".15",
        "axes.labelcolor": ".15",
        # legend.frameon: False
        "legend.numpoints": 1,
        "legend.scatterpoints": 1,
        "xtick.direction": "out",
        "ytick.direction": "out",
        "xtick.color": ".15",
        "ytick.color": ".15",
        "axes.axisbelow": True,
        "image.cmap": "Greys",
        "font.family": ["sans-serif"],
        "font.sans-serif": [
            "Arial",
            "Liberation Sans",
            "Bitstream Vera Sans",
            "sans-serif",
        ],
        "grid.linestyle": "-",
        "lines.solid_capstyle": "round",
        # Seaborn darkgrid parameters (.15 = dark_gray, .8 = light_gray)
        "axes.grid": True,
        "axes.facecolor": "#EAEAF2",
        "axes.edgecolor": "white",
        "axes.linewidth": 0,
        "grid.color": "white",
        # Seaborn notebook context
        "figure.figsize": [8.0, 5.5],
        "axes.labelsize": 11,
        "axes.titlesize": 12,
        "xtick.labelsize": 10,
        "ytick.labelsize": 10,
        "legend.fontsize": 10,
        "grid.linewidth": 1,
        "lines.linewidth": 1.75,
        "patch.linewidth": 0.3,
        "lines.markersize": 7,
        "lines.markeredgewidth": 0,
        "xtick.major.width": 1,
        "ytick.major.width": 1,
        "xtick.minor.width": 0.5,
        "ytick.minor.width": 0.5,
        "xtick.major.pad": 7,
        "ytick.major.pad": 7,
    }

    try:
        register_matplotlib_converters()
        matplotlib.rcParams.update(style_rc)
        sns.set_style(style="white")
        yield
    finally:
        # Revert the unit registries and rcParams no matter what happened.
        deregister_matplotlib_converters()
        with warnings.catch_warnings():
            # Restoring a copied rcParams dict can trip deprecation warnings
            # for keys matplotlib has since deprecated; silence those.
            warnings.filterwarnings("ignore", category=matplotlib.cbook.mplDeprecation)
            matplotlib.rcParams.update(saved_rc)
def batch_process_xcorr(src_files, dataset, time_window, snr_threshold, pearson_cutoff_factor=0.5, save_plots=True, underlay_rcf_xcorr=False, force_save=False):
    """
    Process a batch of .nc files to generate standard visualization graphics.

    PNG files are output alongside the source .nc file. To suppress file
    output, set save_plots=False.

    :param src_files: List of files to process
    :type src_files: Iterable of str
    :param dataset: Dataset to be used to ascertain the distance between stations.
    :type dataset: FederatedASDFDataset
    :param time_window: Lag time window to plot (plus or minus this value in seconds)
    :type time_window: float
    :param snr_threshold: Minimum signal to noise ratio for samples to be
        included into the clock lag estimate
    :type snr_threshold: float
    :param pearson_cutoff_factor: Cutoff factor forwarded to
        plot_xcorr_file_clock_analysis, defaults to 0.5
        (presumably a Pearson-correlation threshold — TODO confirm against
        plot_xcorr_file_clock_analysis)
    :type pearson_cutoff_factor: float, optional
    :param save_plots: Whether to save plots to file, defaults to True
    :type save_plots: bool, optional
    :param underlay_rcf_xcorr: Show the individual correlation of row sample
        with RCF beneath the computed time lag, defaults to False
    :type underlay_rcf_xcorr: bool, optional
    :param force_save: If True, regenerate the PNG even when an up-to-date
        one already exists, defaults to False
    :type force_save: bool, optional
    :return: List of files for which processing failed, and associated error.
    :rtype: list(tuple(str, str))
    """
    PY2 = (sys.version_info[0] == 2)

    register_matplotlib_converters()

    pbar = tqdm(total=len(src_files), dynamic_ncols=True)
    found_preexisting = False
    failed_files = []
    skipped_count = 0
    success_count = 0
    for src_file in src_files:
        _, base_file = os.path.split(src_file)
        pbar.set_description(base_file)
        # Sleep to ensure progress bar is refreshed
        time.sleep(0.2)
        if not os.path.exists(src_file):
            tqdm.write("ERROR! File {} not found!".format(src_file))
            failed_files.append((src_file, "File not found!"))
            continue
        # Extract timestamp from nc filename if available
        settings, title_tag = read_correlator_config(src_file)
        try:
            if save_plots:
                basename, _ = os.path.splitext(src_file)
                png_file = basename + ".png"
                # If png file already exists and has later timestamp than src_file, then skip it.
                if os.path.exists(png_file):
                    src_file_time = os.path.getmtime(src_file)
                    png_file_time = os.path.getmtime(png_file)
                    png_file_size = os.stat(png_file).st_size
                    # Only skip when not forced, the PNG is newer, and it is non-empty.
                    if not force_save and (png_file_time > src_file_time) and (png_file_size > 0):
                        tqdm.write("PNG file {} is more recent than source file {}, skipping!".format(
                            os.path.split(png_file)[1], os.path.split(src_file)[1]))
                        found_preexisting = True
                        skipped_count += 1
                        pbar.update()
                        continue
                plot_xcorr_file_clock_analysis(src_file, dataset, time_window, snr_threshold,
                                               pearson_cutoff_factor, png_file=png_file, show=False,
                                               underlay_rcf_xcorr=underlay_rcf_xcorr,
                                               title_tag=title_tag, settings=settings)
            else:
                # No file output requested; plot without a png target.
                plot_xcorr_file_clock_analysis(src_file, dataset, time_window, snr_threshold,
                                               pearson_cutoff_factor,
                                               underlay_rcf_xcorr=underlay_rcf_xcorr,
                                               title_tag=title_tag, settings=settings)
            success_count += 1
            pbar.update()
        except Exception as e:
            # Record the failure and keep going with the remaining files.
            tqdm.write("ERROR processing file {}".format(src_file))
            failed_files.append((src_file, str(e)))
        # Python 2 does not handle circular references, so it helps to explicitly clean up.
        if PY2:
            gc.collect()
    pbar.close()
    if found_preexisting:
        print("Some files were skipped because pre-existing matching png files were up-to-date.\n"
              "Remove png files to force regeneration.")
    deregister_matplotlib_converters()
    return failed_files