def tile_rgb_images(*imgs, row=3, save_path=None, imshow=False, legend=None, **kwargs):
    """Tile one or more sequences of images into a grid figure and save it.

    Args:
        *imgs: one or more sequences of images; ``imgs[i][j]`` is converted
            through ``array2image``. All sequences must have the same length.
        row (int): maximum number of grid rows; applied only when smaller
            than the common sequence length.
        save_path (str): filename template, formatted with a time suffix.
            NOTE(review): the default ``None`` crashes at ``save_path.format``
            — presumably callers always pass a template; confirm.
        imshow (bool): if True also display the figure (inline in IPython,
            otherwise via the GUI backend).
        legend (list): optional column titles, used only when its length
            equals the number of sequences.
        **kwargs: accepted and ignored.

    Returns:
        The ``plt`` module in the single-image branch, otherwise the figure.
        NOTE(review): the two branches return different types.

    Raises:
        ValueError: when the sequences have differing lengths.
    """
    make_dir_if_need(save_path)
    # Collect the distinct sequence lengths; they must all agree.
    distinct_row = list(set([len(ims) for ims in imgs]))
    if len(distinct_row) > 1:
        raise ValueError(
            'imgs should have same length, but got {0}'.format(distinct_row))
    else:
        distinct_row = unpack_singleton(distinct_row)
    # Cap the number of plotted rows at the requested `row`.
    if 1 <= row < distinct_row:
        distinct_row = row
    suffix = get_time_suffix()
    if len(imgs) == 1 and distinct_row == 1:
        # Single image: save it directly, no grid needed.
        img = array2image(imgs[0][0])
        filename = save_path.format(suffix)
        img.save(filename)
        plt.imshow(img)
        if imshow:
            if is_in_ipython():
                plt.axis("off")
                plt.ioff()
                display.display(plt.gcf())
            else:
                plt.axis("off")
                plt.imshow(img, interpolation="nearest", animated=True)
                plt.ioff()
                plt.gcf().show()
        return plt
    else:
        fig = plt.gcf()
        # NOTE(review): plt.subplots creates a brand-new figure, so `fig`
        # above now refers to the previous figure while all drawing below
        # happens on the new current figure; `figure`/`ax` are never used.
        figure, ax = plt.subplots(2, 2)
        # fig.set_size_inches(len(imgs) * 2, row * 2)
        plt.clf()
        plt.ion()  # is not None:
        # Fill the grid: column = m % len(imgs), row = m // len(imgs).
        for m in range(distinct_row * len(imgs)):
            plt.subplot(distinct_row, len(imgs), m + 1)
            # Column titles only on the first grid row.
            if m < len(imgs) and legend is not None and len(legend) == len(
                    imgs):
                plt.gca().set_title(legend[m])
            img = array2image((imgs[int(m % len(imgs))][int(m // len(imgs))]))
            plt.imshow(img, interpolation="nearest", animated=True)
            plt.axis("off")
        filename = save_path.format(suffix)
        plt.savefig(filename, bbox_inches='tight')
        if imshow:
            # plSize = fig.get_size_inches()
            # fig.set_size_inches((int(round(plSize[0] * 0.75, 0)), int(round(plSize[1] * 0.75, 0))))
            if is_in_ipython():
                plt.ioff()
                display.display(plt.gcf())
            else:
                plt.ioff()
                plt.show(block=False)
        return fig
def on_overall_batch_end(self, training_context):
    """Redraw the overall loss/metric curves at the configured interval.

    Fires either every ``batch_inteval`` batches, or on the last batch of
    an epoch whose index matches ``epoch_inteval``.

    Args:
        training_context (dict): shared training state; only
            ``training_names`` is read here — batch/epoch counters come from
            the first training item's own context (assumes all items run in
            lockstep — TODO confirm).
    """
    if not self.is_inplace:
        # Trigger on the batch interval, OR on the final batch of an epoch
        # that matches the epoch interval.
        if (self.batch_inteval > 0 and
                (self.training_items.value_list[0].
                 training_context['current_batch'] + 1) %
                self.batch_inteval == 0) or (
                    self.epoch_inteval > 0 and
                    self.training_items.value_list[0].
                    training_context['current_batch'] + 1 ==
                    self.training_items.value_list[0].
                    training_context['total_batch'] and
                    (self.training_items.value_list[0].
                     training_context['current_epoch'] + 1) %
                    self.epoch_inteval == 0):
            # Periodically clear notebook output so old plots don't pile up.
            if is_in_ipython(
            ) and self.counter == self.clean_ipython_output_frequency:
                display.clear_output(wait=True)
                self.counter = 0
            # Rebuild the history lists from every training item.
            self.loss_history_list = []
            self.metric_history_list = []
            for trainitem in self.training_items.value_list:
                self.loss_history_list.append(trainitem.batch_loss_history)
                self.metric_history_list.append(
                    trainitem.batch_metric_history)
            self.counter += 1
            loss_metric_curve(
                self.loss_history_list,
                self.metric_history_list,
                legend=training_context['training_names'].value_list,
                calculate_base='batch',
                max_iteration=None,
                save_path=os.path.join(self.save_path, self.name_prefix),
                imshow=self.imshow)
def __init__(self, batch_inteval=100):
    """Callback that prints gradient statistics every `batch_inteval` batches.

    Args:
        batch_inteval (int): number of batches between two gradient reports.
    """
    # Gradient printing is batch-driven only, so the epoch interval is
    # disabled in the base class.
    super(PrintGradientsCallback, self).__init__(epoch_inteval=-1,
                                                 batch_inteval=batch_inteval)
    # Probe the runtime environment once at construction time.
    self.is_in_ipython = is_in_ipython()
    self.is_in_colab = is_in_colab()
    self.batch_inteval = batch_inteval
    # Names of the first/last layers inspected, plus the buffered report lines.
    self.first_layer = ''
    self.last_layer = ''
    self.lines = []
def tile_rgb_images(*imgs, row=3, save_path=None, imshow=False):
    """Tile sequences of images into a grid figure and save it to disk.

    Args:
        *imgs: one or more sequences of images; ``imgs[i][j]`` is converted
            through ``array2image``.
        row (int): NOTE(review): this parameter is dead — it is overwritten
            with ``len(imgs)`` on the first line below.
        save_path (str): filename template formatted with a time suffix.
            NOTE(review): the default ``None`` crashes at ``save_path.format``.
        imshow (bool): if True also display the figure (inline in IPython,
            otherwise via the GUI backend).
    """
    make_dir_if_need(save_path)
    # The row count is forced to the number of sequences; the `row` argument
    # is effectively ignored (see NOTE in the docstring).
    row = len(imgs)
    suffix = get_time_suffix()
    if len(imgs) == 1 and row == 1:
        # Single image: save it directly, no grid needed.
        img = array2image(imgs[0][0])
        filename = save_path.format(suffix)
        img.save(filename)
        plt.imshow(img)
        if imshow:
            if is_in_ipython():
                plt.axis("off")
                plt.ioff()
                display.display(plt.gcf())
            else:
                plt.axis("off")
                plt.imshow(img, interpolation="nearest", animated=True)
                plt.ioff()
                plt.gcf().show()
    else:
        fig = plt.gcf()
        #fig.set_size_inches(len(imgs) * 2, row * 2)
        plt.clf()
        plt.ion()  # is not None:
        # Fill the grid: column = m % len(imgs), row = m // len(imgs).
        for m in range(row * len(imgs)):
            plt.subplot(row, len(imgs), m + 1)
            img = array2image((imgs[int(m % len(imgs))][int(m // len(imgs))]))
            plt.imshow(img, interpolation="nearest", animated=True)
            plt.axis("off")
        filename = save_path.format(suffix)
        plt.savefig(filename, bbox_inches='tight')
        if imshow == True:
            #plSize = fig.get_size_inches()
            #fig.set_size_inches((int(round(plSize[0] * 0.75, 0)), int(round(plSize[1] * 0.75, 0))))
            if is_in_ipython():
                plt.ioff()
                display.display(plt.gcf())
            else:
                plt.ioff()
                plt.show(block=False)
def __init__(self, epoch_inteval, batch_inteval, save_path: str = None, imshow=False):
    """Shared initializer for visualization callbacks.

    Args:
        epoch_inteval (int): epochs between visualizations (-1 disables).
        batch_inteval (int): batches between visualizations (-1 disables).
        save_path (str): output folder; defaults to 'results' when None.
        imshow (bool): whether to display figures in addition to saving them.
    """
    super(VisualizationCallbackBase, self).__init__()
    # Probe the runtime environment once so hooks don't re-detect per batch.
    self.is_in_ipython = is_in_ipython()
    self.is_in_colab = is_in_colab()
    self.epoch_inteval = epoch_inteval
    self.batch_inteval = batch_inteval
    # make_dir_if_need creates the folder when missing and returns the path.
    target_folder = 'results' if save_path is None else save_path
    self.save_path = make_dir_if_need(target_folder)
    self.imshow = imshow
def __init__(self, frequency=-1, unit='batch', save_path: str = None, imshow=False):
    """Shared initializer for visualization callbacks (frequency/unit form).

    Args:
        frequency (int): how often to visualize, measured in `unit`s
            (-1 disables).
        unit (str): one of 'batch', 'step' or 'epoch'.
        save_path (str): output folder; defaults to 'results' when None.
        imshow (bool): whether to display figures in addition to saving them.
    """
    super(VisualizationCallbackBase, self).__init__()
    # Probe the runtime environment once at construction time.
    self.is_in_ipython = is_in_ipython()
    self.is_in_colab = is_in_colab()
    self.frequency = frequency
    if unit in ('batch', 'step', 'epoch'):
        self.unit = unit
    else:
        print(red_color('Only [batch, step, epoch] are valid unit.', True))
        # FIX: previously an invalid unit only printed a warning and left
        # self.unit unset, causing an AttributeError the first time any hook
        # read it. Fall back to the default unit so the callback stays usable.
        self.unit = 'batch'
    if save_path is None:
        save_path = 'results'
    # make_dir_if_need creates the folder when missing and returns the path.
    self.save_path = make_dir_if_need(save_path)
    self.imshow = imshow
def __init__(self,
             epoch_inteval=-1,
             batch_inteval=-1,
             save_path: str = 'results',
             reverse_image_transform=None,
             palette=None,
             background=(120, 120, 120),
             name_prefix: str = 'segtile_image_{0}.png',
             imshow=False):
    """Callback that saves segmentation results as tiled images.

    Args:
        epoch_inteval/batch_inteval/save_path/imshow: see the base class.
        reverse_image_transform: callable that undoes input normalization.
        palette: color palette used to paint class indices.
        background: RGB fill value for unlabelled pixels.
        name_prefix (str): filename template for the saved tile image.
    """
    super(SegTileImageCallback, self).__init__(epoch_inteval, batch_inteval,
                                               save_path, imshow)
    self.is_in_ipython = is_in_ipython()
    self.is_in_colab = is_in_colab()
    self.palette = palette
    self.reverse_image_transform = reverse_image_transform
    self.tile_image_name_prefix = name_prefix
    # Kept as a (1, 1, C) array so it can broadcast over an H x W image
    # (assumes channel-last layout — TODO confirm against the usage site).
    self.background = to_numpy(background)[np.newaxis, np.newaxis]
def on_overall_batch_end(self, training_context):
    """Redraw the overall loss/metric curves every `frequency` steps.

    Skips the first 10 steps so early noisy values don't dominate the plot.
    Also mirrors the resulting figure to TensorBoard when enabled.

    Args:
        training_context (dict): shared training state; reads 'steps' and
            'training_names'.
    """
    if not self.is_inplace and training_context['steps'] > 10:
        # NOTE(review): the 'batch' and 'step' arms of this condition are
        # identical; both fire every `frequency` global steps.
        if self.frequency > 0 and (
            (self.unit == 'batch' and
             (training_context['steps'] + 1) % self.frequency == 0) or
            (self.unit == 'step' and
             (training_context['steps'] + 1) % self.frequency == 0)):
            # Periodically clear notebook output so old plots don't pile up.
            if is_in_ipython(
            ) and self.counter == self.clean_ipython_output_frequency:
                display.clear_output(wait=True)
                self.counter = 0
            # Rebuild the history lists and the per-item list of metric names
            # that should actually be plotted (print_only metrics excluded).
            self.loss_history_list = []
            self.metric_history_list = []
            plotable_metric_names = OrderedDict()
            for i in range(len(self.training_items.value_list)):
                trainitem = self.training_items.value_list[i]
                self.loss_history_list.append(trainitem.batch_loss_history)
                plotable_metric_names[i] = [
                    k for k, v in trainitem._metrics.item_list
                    if v.print_only == False
                ]
                self.metric_history_list.append(
                    trainitem.batch_metric_history)
            self.counter += 1
            fig = loss_metric_curve(
                self.loss_history_list,
                self.metric_history_list,
                metrics_names=plotable_metric_names,
                legend=training_context['training_names'].value_list,
                calculate_base='batch',
                max_iteration=None,
                save_path=os.path.join(self.save_path, self.name_prefix),
                imshow=self.imshow)
            # Mirror the figure to TensorBoard; add_figure(close=True) closes
            # it there, and plt.close() releases the local figure as well.
            if ctx.enable_tensorboard and ctx.summary_writer is not None:
                ctx.summary_writer.add_figure(
                    'overall/plot/loss_metric_curve',
                    fig,
                    global_step=training_context['steps'],
                    close=True,
                    walltime=time.time())
            plt.close()
def __init__(self,
             frequency=-1,
             unit='batch',
             save_path: str = 'results',
             reverse_image_transform=None,
             labels=None,
             palette=None,
             background=(120, 120, 120),
             name_prefix: str = 'detection_plot_image_{0}.png',
             imshow=False):
    """Callback that plots detection results (boxes and labels) on images.

    Args:
        frequency/unit/save_path/imshow: see the base class.
        reverse_image_transform: callable that undoes input normalization.
        labels: class names used to annotate detected boxes.
        palette: color palette for the detection classes.
        background: RGB fill value used for padding.
        name_prefix (str): filename template for the saved plot image.
    """
    super(DetectionPlotImageCallback, self).__init__(frequency, unit,
                                                     save_path, imshow)
    self.is_in_ipython = is_in_ipython()
    self.is_in_colab = is_in_colab()
    # Detection-specific rendering settings.
    self.labels = labels
    self.palette = palette
    self.reverse_image_transform = reverse_image_transform
    self.tile_image_name_prefix = name_prefix
    # Kept as a (1, 1, C) array so it can broadcast over an H x W image
    # (assumes channel-last layout — TODO confirm against the usage site).
    self.background = to_numpy(background)[np.newaxis, np.newaxis]
def __init__(self,
             frequency=-1,
             unit='batch',
             save_path: str = 'results',
             reverse_image_transform=None,
             is_label_mask=False,
             palette=None,
             background=(120, 120, 120),
             name_prefix: str = 'segtile_image_{0}.png',
             imshow=False):
    """Callback that saves segmentation results as tiled images.

    Args:
        frequency/unit/save_path/imshow: see the base class.
        reverse_image_transform: callable that undoes input normalization.
        is_label_mask (bool): whether targets are label-index masks.
        palette: color palette used to paint class indices.
        background: RGB fill value for unlabelled pixels.
        name_prefix (str): filename template for the saved tile image.
    """
    super(SegTileImageCallback, self).__init__(frequency, unit, save_path,
                                               imshow)
    self.is_in_ipython = is_in_ipython()
    self.is_in_colab = is_in_colab()
    # Segmentation-specific rendering settings.
    self.is_label_mask = is_label_mask
    self.palette = palette
    self.reverse_image_transform = reverse_image_transform
    self.tile_image_name_prefix = name_prefix
    # Stored as a plain numpy array (no broadcasting axes added here).
    self.background = to_numpy(background)
def __init__(self,
             epoch_inteval=-1,
             batch_inteval=-1,
             save_path: str = 'results',
             name_prefix: str = 'tile_image_{0}.png',
             row=3,
             include_mask=None,
             reverse_image_transform=None,
             imshow=False):
    """Callback that tiles GAN samples into a row x row grid image.

    Args:
        epoch_inteval/batch_inteval/save_path/imshow: see the base class.
        name_prefix (str): filename template for the saved tile image.
        row (int): grid is row x row samples.
        include_mask: accepted for signature compatibility (not stored here).
        reverse_image_transform: callable that undoes input normalization.
    """
    super(GanTileImageCallback, self).__init__(epoch_inteval, batch_inteval,
                                               save_path, imshow)
    self.is_in_ipython = is_in_ipython()
    self.is_in_colab = is_in_colab()
    self.tile_image_name_prefix = name_prefix
    self.reverse_image_transform = reverse_image_transform
    self.row = row
    provider = enforce_singleton(ctx.get_data_provider())
    # A full grid needs row*row samples; when one minibatch is smaller than
    # that, samples must be accumulated across several batches.
    self.accumulate_sample = provider.minibatch_size < row * row
    self.sample_enough = False
    # Buffers for accumulated inputs and generated outputs.
    self.tile_images_list = []
    self.output_arr = []
def __init__(self,
             epoch_inteval=-1,
             batch_inteval=-1,
             save_path: str = 'results',
             name_prefix: str = 'tile_image_{0}.png',
             row=3,
             include_input=True,
             include_output=True,
             include_target=True,
             include_mask=None,
             reverse_image_transform=None,
             imshow=False):
    """Callback that saves input/output/target tensors as one tiled image.

    Args:
        epoch_inteval/batch_inteval/save_path/imshow: see the base class.
        name_prefix (str): filename template for the saved tile image.
        row (int): number of grid rows.
        include_input/include_output/include_target/include_mask: choose
            which tensors take part in the tiled picture.
        reverse_image_transform: callable that undoes input normalization.
    """
    super(TileImageCallback, self).__init__(epoch_inteval, batch_inteval,
                                            save_path, imshow)
    # Probe the runtime environment once at construction time.
    self.is_in_ipython = is_in_ipython()
    self.is_in_colab = is_in_colab()
    # What goes into the picture.
    self.include_input = include_input
    self.include_output = include_output
    self.include_target = include_target
    self.include_mask = include_mask
    # How the picture is produced.
    self.tile_image_name_prefix = name_prefix
    self.reverse_image_transform = reverse_image_transform
    self.row = row
def preview_images(self, key=None, is_concate=True):
    """Preview images from the training data of this data provider.

    Args:
        key: selects what to preview —
            None: pull one full minibatch and show every image dataset;
            slice: transform and return the raw items in that index range;
            int: transform and return the single item at that index.
        is_concate (bool): for the slice form, concatenate the results into
            one image instead of returning a list.

    Returns:
        None (inline display) or an OrderedDict of arrays when ``key`` is
        None; a PIL image (or list of them) for slice/int keys; None when
        the provider has no image dataset.
    """
    # Symbols of image datasets, excluding path-only datasets.
    image_ds = [
        ds.symbol for ds in self.traindata.get_datasets()
        if isinstance(ds, ImageDataset)
        and ds.object_type != ObjectType.image_path
    ]
    if len(image_ds) == 0:
        print(
            red_color(
                'This data_provider not have any ImageDataset in it.'))
        return None
    else:
        if key is None:
            # Temporarily switch to dict mode so one minibatch comes back
            # keyed by dataset symbol, then restore the previous mode.
            orig_mode = self.mode
            self.mode = 'dict'
            data = self.next()
            self.mode = orig_mode
            return_images = OrderedDict()
            for k, v in data.items():
                if k.name in image_ds:
                    batch_imgs = v
                    # Undo normalization, then concatenate the batch along
                    # the width axis (-1 for HW / channel-first images,
                    # otherwise -2 — assumes channel-first when shape[0] is
                    # 1/3/4; TODO confirm).
                    batch_imgs = self.reverse_image_transform(batch_imgs)
                    batch_imgs = np.concatenate(
                        [img for img in batch_imgs],
                        axis=-1 if batch_imgs[0].ndim == 2 or
                        (batch_imgs[0].ndim == 3 and
                         batch_imgs[0].shape[0] in [1, 3, 4]) else -2)
                    return_images[k.name] = batch_imgs
            if is_in_ipython():
                # Show inline; nothing is returned in this branch.
                for k, v in return_images.items():
                    print(blue_color(k), flush=True)
                    from IPython import display
                    display.display(array2image(v))
            else:
                return return_images
        elif isinstance(key, slice):
            start = key.start if key.start is not None else 0
            stop = key.stop
            results = []
            for k in range(start, stop, 1):
                img = self.traindata.data.__getitem__(k)
                if isinstance(img, np.ndarray):
                    # Apply the image transforms except backend adaption and
                    # normalization, so the preview stays human-viewable.
                    for fc in self.image_transform_funcs:
                        if (
                                inspect.isfunction(fc) or
                                isinstance(fc, Transform)
                        ) and fc is not image_backend_adaption and fc is not Normalize and fc is not normalize:
                            img = fc(img)
                    results.append(img)
            if is_concate:
                # Same axis rule as above for stitching into a single image.
                results = np.concatenate(
                    results,
                    axis=-1 if results[0].ndim == 2 or
                    (results[0].ndim == 3 and
                     results[0].shape[0] in [1, 3, 4]) else -2)
                return array2image(results)
            else:
                return [array2image(img) for img in results]
        elif isinstance(key, int):
            img = self.traindata.data.__getitem__(key)
            if isinstance(img, np.ndarray):
                # Same transform filtering as the slice branch.
                for fc in self.image_transform_funcs:
                    if (
                            inspect.isfunction(fc) or
                            isinstance(fc, Transform)
                    ) and fc is not image_backend_adaption and fc is not Normalize and fc is not normalize:
                        img = fc(img)
                return array2image(img)
def steps_histogram(grads, weights=None, sample_collected=None, bins=None, size=(18, 8), inteval=1, title='', save_path=None, imshow=False):
    """Draw 3D histograms of gradients and/or weights across training steps.

    Each training step contributes one histogram polygon stacked along the
    "steps" axis of a 3D plot.

    Args:
        grads: sequence of gradient arrays, one entry per collected step
            (may be None to skip the gradients panel).
        weights: sequence of weight arrays, one per collected step (may be
            None to skip the weights panel).
        sample_collected: optional 0/1 flags; positions flagged 1 supply the
            step numbers used on the "steps" axis.
        bins: histogram bin edges; defaults to the module-level default_bins.
        size (tuple): figure size in inches.
        inteval (int): plot only every `inteval`-th weight snapshot.
        title (str): prefix for the gradients panel title.
        save_path (str): when given, save the figure there.
        imshow (bool): when True also display the figure.
    """
    global default_bins
    from mpl_toolkits.mplot3d import Axes3D
    if bins is None:
        bins = default_bins
    # Map collected flags to the actual step indices they correspond to.
    collected_samples = []
    if sample_collected is not None and len(sample_collected) > 0:
        sample_collected = np.array(sample_collected)
        sample = np.arange(len(sample_collected))
        collected_samples = sample[sample_collected == 1]
    plt.ion()
    fig = plt.figure(figsize=size)
    fig.patch.set_facecolor('white')
    if grads is not None:
        # Left panel when both grads and weights are drawn, otherwise full width.
        ax = fig.add_subplot(
            1, 2, 1, projection='3d'
        ) if grads is not None and weights is not None else fig.add_subplot(
            1, 1, 1, projection='3d')
        # ax = fig.gca(projection='3d')
        # Make verts a list, verts[i] will be a list of (x,y) pairs defining polygon i
        verts = []
        # The ith polygon will appear on the plane y = zs[i]
        zs = np.arange(len(grads))
        if len(collected_samples) == len(grads):
            zs = collected_samples
        new_zs = []
        max_frequency = 0
        for i in range(len(grads)):
            # Histogram of the flattened gradient tensor for step i.
            a, b = np.histogram(grads[i].reshape([-1]), bins)
            ys = a
            xs = b[:-1]
            new_zs.append(zs[i])
            max_frequency = max(np.max(a), max_frequency)
            verts.append(polygon_under_graph(xs, ys))
        poly = PolyCollection(verts,
                              facecolors=['r', 'g', 'b', 'y'],
                              alpha=.4)
        ax.add_collection3d(poly, zs=new_zs, zdir='y')
        override = {
            'fontsize': 'small',
            'verticalalignment': 'top',
            'horizontalalignment': 'center'
        }
        ax.set_xlabel('gradients', override)
        ax.set_ylabel('steps', override)
        ax.set_zlabel('frequency', override)
        ax.set_xlim(min(bins), max(bins))
        ax.set_ylim(0, int(max(new_zs)))
        ax.set_zlim(0, int(max_frequency * 1.1))
        plt.title(title + ' Gradients Histogram')
    if weights is not None:
        # Right panel when grads were drawn, otherwise full width.
        ax = fig.add_subplot(
            1, 2, 2,
            projection='3d') if grads is not None else fig.add_subplot(
                1, 1, 1, projection='3d')
        # Weights span a wider range than gradients, so scale the bin edges.
        bins = [b * 10 for b in bins]
        # Make verts a list, verts[i] will be a list of (x,y) pairs defining polygon i
        verts = []
        # The ith polygon will appear on the plane y = zs[i]
        zs = np.arange(len(weights))
        if len(collected_samples) == len(weights):
            zs = collected_samples
        new_zs = []
        max_frequency = 0
        for i in range(len(weights)):
            # Subsample snapshots to keep the plot readable.
            if i % inteval == 0:
                a, b = np.histogram(weights[i].reshape([-1]), bins)
                ys = a
                # Small offset keeps the polygon off the exact bin edge.
                xs = b[:-1] + 0.001
                new_zs.append(zs[i])
                max_frequency = max(np.max(a), max_frequency)
                verts.append(polygon_under_graph(xs, ys))
        poly = PolyCollection(verts,
                              facecolors=['r', 'g', 'b', 'y'],
                              alpha=.4)
        ax.add_collection3d(poly, zs=new_zs, zdir='y')
        override = {
            'fontsize': 'small',
            'verticalalignment': 'top',
            'horizontalalignment': 'center'
        }
        ax.set_xlabel('weights', override)
        ax.set_ylabel('steps', override)
        ax.set_zlabel('frequency', override)
        ax.set_xlim(min(bins), max(bins))
        ax.set_ylim(0, int(max(new_zs)))
        ax.set_zlim(0, int(max_frequency * 1.1))
        plt.title('Weights Histogram')
    if save_path is not None:
        plt.savefig(save_path, bbox_inches='tight')
    if imshow == True:
        if is_in_ipython() or is_in_colab():
            display.display(plt.gcf())
            plt.close(fig)
        else:
            plt.ioff()
            plt.show(block=False)
def loss_metric_curve(losses, metrics, legend=None, calculate_base='epoch', max_iteration=None, save_path=None, imshow=False):
    """Plot loss and metric history curves side by side.

    Args:
        losses: a HistoryBase instance or a list of them (one per model).
        metrics: a HistoryBase instance or a list of them (one per model).
        legend (list): optional display names, one per model.
        calculate_base (str): x-axis unit label ('epoch' or 'batch').
        max_iteration (int): when given, fixes the x-axis range to [0, max].
        save_path (str): when given, save the figure there.
        imshow (bool): when True also display the figure.
    """
    fig = plt.gcf()
    fig.set_size_inches(18, 8)
    plt.clf()
    plt.ion()  # is not None:
    # --- left panel: total loss curves ---
    plt.subplot(2, 2, 1)
    if losses.__class__.__name__ == 'HistoryBase':
        steps, values = losses.get_series('total_losses')
        plt.plot(steps, values)
        plt.legend(['loss'], loc='upper left')
    elif isinstance(losses, list):
        for item in losses:
            if item.__class__.__name__ == 'HistoryBase':
                steps, values = item.get_series('total_losses')
                plt.plot(steps, values)
        if legend is not None:
            plt.legend(['{0}'.format(lg) for lg in legend],
                       loc='upper right')
        else:
            plt.legend(['{0}'.format(i) for i in range(len(losses))],
                       loc='upper right')
    plt.title('model loss')
    plt.ylabel('loss')
    plt.xlabel(calculate_base)
    if max_iteration is not None:
        plt.xlim(0, max_iteration)
    # --- right panel: metric curves ---
    plt.subplot(2, 2, 2)
    if metrics.__class__.__name__ == 'HistoryBase':
        for k, v in metrics.items():
            steps, values = metrics.get_series(k)
            plt.plot(steps, values)
        plt.legend(list(metrics.keys()), loc='upper left')
    elif isinstance(metrics, list):
        legend_list = []
        for i in range(len(metrics)):
            item = metrics[i]
            if item.__class__.__name__ == 'HistoryBase':
                for k, v in item.items():
                    steps, values = item.get_series(k)
                    plt.plot(steps, values)
                    # FIX: append plain strings instead of one-element lists;
                    # matplotlib stringifies each legend entry, so the old
                    # list entries rendered as "['acc 0']".
                    if len(v) > 0 and legend is not None:
                        legend_list.append('{0} {1}'.format(k, legend[i]))
                    elif len(v) > 0:
                        legend_list.append('{0} {1}'.format(k, i))
        plt.legend(legend_list, loc='upper left')
    plt.title('model metrics')
    plt.ylabel('metrics')
    plt.xlabel(calculate_base)
    if max_iteration is not None:
        plt.xlim(0, max_iteration)
    if save_path is not None:
        plt.savefig(save_path, bbox_inches='tight')
    if imshow:
        if is_in_ipython():
            plt.ioff()
            display.display(plt.gcf())
        else:
            plt.ioff()
            plt.draw()
            plt.show(block=False)
def loss_metric_curve(losses, metrics, metrics_names, legend=None, calculate_base='epoch', max_iteration=None, save_path=None, imshow=False, **kwargs):
    """Plot loss and metric curves, splitting metrics over two y-axes.

    Metrics whose scale differs strongly from the first plotted metric (mean
    below 0.1x or above 10x) go to a secondary twin axis; later metrics are
    assigned to whichever axis has the closer (min, mean, max) signature.

    Args:
        losses: a HistoryBase instance or a list of them (one per model).
        metrics: a HistoryBase instance or a list of them (one per model).
        metrics_names: mapping model-index -> list of metric keys to plot.
        legend (list): optional display names, one per model.
        calculate_base (str): x-axis unit label ('epoch' or 'batch').
        max_iteration (int): when given, fixes the x-axis range to [0, max].
        save_path (str): when given, save the figure there.
        imshow (bool): when True also display the figure.
        **kwargs: accepted and ignored.

    Returns:
        The matplotlib figure.
    """
    # Loss-line colors, reused so each model's metrics match its loss color.
    colors = []
    line_type = ['-', '--', '-.', ':']
    fig = plt.gcf()
    fig.set_size_inches(18, 8)
    plt.clf()
    plt.ion()  # is not None:
    # --- loss panel ---
    loss_ax1 = fig.add_subplot(2, 2, 1)
    if losses.__class__.__name__ == 'HistoryBase':
        steps, values = losses.get_series('total_losses')
        loss_ax1.plot(steps, values, label='total_losses')
    elif isinstance(losses, list):
        for n in range(len(losses)):
            item = losses[n]
            legend_label = 'total_losses' + str(n)
            if legend is not None and len(legend) == len(losses):
                legend_label = legend[n]
            if item.__class__.__name__ == 'HistoryBase':
                steps, values = item.get_series('total_losses')
                p = loss_ax1.plot(steps, values, label=legend_label)
                # Remember this model's line color for the metric panel.
                colors.append(p[-1].get_color())
    loss_ax1.set_title('model loss', fontsize=14, fontweight='bold')
    loss_ax1.set_ylabel('loss')
    loss_ax1.set_xlabel(calculate_base)
    loss_ax1.legend(loc="upper right")
    plt.legend(loc=2)
    if max_iteration is not None:
        loss_ax1.set_xlim(0, max_iteration)
    # --- metric panel (primary axis + on-demand twin axis) ---
    metric_ax1 = fig.add_subplot(2, 2, 2)
    if len(metrics) == 0:
        pass
    else:
        metric_ax2 = metric_ax1.twinx()
        # (min, mean, max) signatures and key/limit bookkeeping per axis.
        # NOTE(review): if no metric key matches metrics_names the limit
        # lists stay empty and the set_ylim calls below would raise.
        first_axis_range = None
        second_axis_range = None
        first_axis_keys = []
        second_axis_keys = []
        first_axis_limit = []
        second_axis_limit = []
        if metrics.__class__.__name__ == 'HistoryBase':
            metrics_need_plot = metrics_names[0]
            for n in range(len(metrics)):
                k, v = list(metrics.items())[n]
                if k in metrics_need_plot:
                    legend_label = k
                    if legend is not None and len(legend) == len(metrics):
                        legend_label = legend[n]
                    steps, values = metrics.get_series(k)
                    values_np = np.array(values)
                    if first_axis_range is None:
                        # First plotted metric defines the primary axis scale.
                        first_axis_range = (values_np.min(),
                                            values_np.mean(),
                                            values_np.max())
                        first_axis_keys.append(k)
                        first_axis_limit = [
                            first_axis_range[0], first_axis_range[2]
                        ]
                        metric_ax1.plot(steps, values, label=legend_label)
                    else:
                        # Scale differs by >10x from the primary axis: open
                        # the secondary axis for this metric.
                        if second_axis_range is None and (
                                values_np.mean() < first_axis_range[1] * 0.1
                                or
                                values_np.mean() > first_axis_range[1] * 10):
                            second_axis_range = (values_np.min(),
                                                 values_np.mean(),
                                                 values_np.max())
                            metric_ax2.plot(steps, values,
                                            label=legend_label)
                            second_axis_limit = [
                                second_axis_range[0], second_axis_range[2]
                            ]
                            second_axis_keys.append(k)
                        elif second_axis_range is not None:
                            # Both axes exist: assign to the axis whose
                            # (min, mean, max) signature is closest.
                            compare_array = np.array([
                                list(first_axis_range),
                                list(second_axis_range)
                            ])
                            this_array = np.array([[
                                values_np.min(),
                                values_np.mean(),
                                values_np.max()
                            ]])
                            distance = expand_dims(
                                sqrt(
                                    reduce_sum((compare_array - this_array)**2,
                                               axis=-1)), 0)
                            result = argmin(distance, axis=-1)[0]
                            if result == 0:
                                metric_ax1.plot(steps, values,
                                                label=legend_label)
                                first_axis_keys.append(k)
                                first_axis_limit = [
                                    min(first_axis_limit[0],
                                        values_np.min()),
                                    max(first_axis_limit[1],
                                        values_np.max())
                                ]
                            else:
                                metric_ax2.plot(steps, values,
                                                label=legend_label)
                                second_axis_keys.append(k)
                                second_axis_limit = [
                                    min(second_axis_limit[0],
                                        values_np.min()),
                                    max(second_axis_limit[1],
                                        values_np.max())
                                ]
                        else:
                            # Similar scale and no secondary axis yet:
                            # stay on the primary axis.
                            metric_ax1.plot(steps, values,
                                            label=legend_label)
                            first_axis_limit = [
                                min(first_axis_limit[0], values_np.min()),
                                max(first_axis_limit[1], values_np.max())
                            ]
                            first_axis_keys.append(k)
            metric_ax1.legend(loc="lower right")
            metric_ax1.set_ylim(first_axis_limit[0], first_axis_limit[1])
            if len(second_axis_keys) > 0:
                metric_ax2.legend()
                metric_ax2.set_ylim(second_axis_limit[0],
                                    second_axis_limit[1])
            #plt.legend(loc='upper left')
        elif isinstance(metrics, list):
            # One history per model: per-model color, per-metric line style.
            legend_list = []
            for i in range(len(metrics)):
                item = metrics[i]
                line_color = colors[i]
                if item.__class__.__name__ == 'HistoryBase':
                    for j in range(len(item.items())):
                        metrics_need_plot = metrics_names[i]
                        k = list(item.keys())[j]
                        if k in metrics_need_plot:
                            legend_label = k + str(i)
                            if legend is not None and len(legend) == len(
                                    metrics):
                                legend_label = legend[i] + ' ' + k
                            steps, values = item.get_series(k)
                            values_np = np.array(values)
                            if first_axis_range is None:
                                # First plotted metric defines the primary
                                # axis scale.
                                first_axis_range = (values_np.min(),
                                                    values_np.mean(),
                                                    values_np.max())
                                first_axis_keys.append(k)
                                first_axis_limit = [
                                    first_axis_range[0], first_axis_range[2]
                                ]
                                metric_ax1.plot(steps,
                                                values,
                                                color=line_color,
                                                linestyle=line_type[j % 4],
                                                linewidth=int(
                                                    (j // 4) % 4) + 1,
                                                label=legend_label)
                            else:
                                if second_axis_range is None and (
                                        values_np.mean() <
                                        first_axis_range[1] * 0.1 or
                                        values_np.mean() >
                                        first_axis_range[1] * 10):
                                    # >10x scale difference: open the
                                    # secondary axis.
                                    second_axis_range = (values_np.min(),
                                                         values_np.mean(),
                                                         values_np.max())
                                    second_axis_keys.append(k)
                                    second_axis_limit = [
                                        second_axis_range[0],
                                        second_axis_range[2]
                                    ]
                                    metric_ax2.plot(
                                        steps,
                                        values,
                                        color=line_color,
                                        linestyle=line_type[j % 4],
                                        linewidth=int((j // 4) % 4) + 1,
                                        label=legend_label)
                                elif k in first_axis_keys:
                                    # Same key already on the primary axis
                                    # (plotted for an earlier model).
                                    first_axis_limit = [
                                        min(first_axis_limit[0],
                                            values_np.min()),
                                        max(first_axis_limit[1],
                                            values_np.max())
                                    ]
                                    metric_ax1.plot(
                                        steps,
                                        values,
                                        color=line_color,
                                        linestyle=line_type[j % 4],
                                        linewidth=int((j // 4) % 4) + 1,
                                        label=legend_label)
                                elif k in second_axis_keys:
                                    second_axis_limit = [
                                        min(second_axis_limit[0],
                                            values_np.min()),
                                        max(second_axis_limit[1],
                                            values_np.max())
                                    ]
                                    metric_ax2.plot(
                                        steps,
                                        values,
                                        color=line_color,
                                        linestyle=line_type[j % 4],
                                        linewidth=int((j // 4) % 4) + 1,
                                        label=legend_label)
                                elif second_axis_range is not None:
                                    # New key with both axes present: pick
                                    # the axis whose reference series has
                                    # the closest (min, mean, max) signature.
                                    _, first_values = item.get_series(
                                        first_axis_keys[0])
                                    first_values = np.array(first_values)
                                    _, second_values = item.get_series(
                                        second_axis_keys[0])
                                    second_values = np.array(second_values)
                                    compare_array = np.array(
                                        [[
                                            first_values.min(),
                                            first_values.mean(),
                                            first_values.max()
                                        ],
                                         [
                                             second_values.min(),
                                             second_values.mean(),
                                             second_values.max()
                                         ]])
                                    this_array = np.array([[
                                        values_np.min(),
                                        values_np.mean(),
                                        values_np.max()
                                    ]])
                                    distance = expand_dims(
                                        sqrt(
                                            reduce_sum(
                                                (compare_array -
                                                 this_array)**2,
                                                axis=-1)), 0)
                                    result = argmin(distance, axis=-1)[0]
                                    if result == 0:
                                        first_axis_keys.append(k)
                                        first_axis_limit = [
                                            min(first_axis_limit[0],
                                                values_np.min()),
                                            max(first_axis_limit[1],
                                                values_np.max())
                                        ]
                                        metric_ax1.plot(
                                            steps,
                                            values,
                                            color=line_color,
                                            linestyle=line_type[j % 4],
                                            linewidth=int((j // 4) % 4) + 1,
                                            label=legend_label)
                                    else:
                                        second_axis_keys.append(k)
                                        second_axis_limit = [
                                            min(second_axis_limit[0],
                                                values_np.min()),
                                            max(second_axis_limit[1],
                                                values_np.max())
                                        ]
                                        metric_ax2.plot(
                                            steps,
                                            values,
                                            color=line_color,
                                            linestyle=line_type[j % 4],
                                            linewidth=int((j // 4) % 4) + 1,
                                            label=legend_label)
                                else:
                                    # Similar scale, no secondary axis yet:
                                    # stay on the primary axis.
                                    first_axis_keys.append(k)
                                    first_axis_limit = [
                                        min(first_axis_limit[0],
                                            values_np.min()),
                                        max(first_axis_limit[1],
                                            values_np.max())
                                    ]
                                    metric_ax1.plot(
                                        steps,
                                        values,
                                        color=line_color,
                                        linestyle=line_type[j % 4],
                                        linewidth=int((j // 4) % 4) + 1,
                                        label=legend_label)
                            if len(values) > 0 and legend is not None:
                                legend_list.append(
                                    ['{0} {1}'.format(k, legend[i])])
                            elif len(values) > 0:
                                legend_list.append(['{0} {1}'.format(k, i)])
            metric_ax1.legend(loc="lower right")
            metric_ax1.set_ylim(first_axis_limit[0], first_axis_limit[1])
            if len(second_axis_keys) > 0:
                metric_ax2.legend()
                metric_ax2.set_ylim(second_axis_limit[0],
                                    second_axis_limit[1])
            #plt.legend(legend_list,loc='upper left')
        metric_ax1.set_title('model metrics', fontsize=14, fontweight='bold')
        metric_ax1.set_ylabel(','.join(first_axis_keys))
        metric_ax1.set_xlabel(calculate_base)
        if len(second_axis_keys) > 0:
            metric_ax2.set_ylabel(','.join(second_axis_keys))
        #metric_ax2.cla()
    if max_iteration is not None:
        metric_ax1.set_xlim(0, max_iteration)
    if save_path is not None:
        plt.savefig(save_path, bbox_inches='tight')
    plt.tight_layout()
    if imshow:
        if is_in_ipython():
            plt.ioff()
            display.display(plt.gcf())
        else:
            plt.ioff()
            plt.draw()
            plt.show(block=False)
    return fig
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from trident.misc.ipython_utils import is_in_ipython, is_in_colab
import math

if is_in_ipython():
    from IPython import display
from tkinter import *
# FIX: `if not is_in_colab:` tested the function OBJECT (always truthy), so
# `not is_in_colab` was always False and the backend-selecting branch never
# ran. Every other use in this block calls the function; do the same here.
if not is_in_colab():
    import matplotlib
    # TkAgg for plain interpreters, NbAgg inside notebooks.
    matplotlib.use(
        'TkAgg' if not is_in_ipython() and not is_in_colab() else 'NbAgg')
else:
    import matplotlib
import matplotlib.pyplot as plt
from matplotlib.collections import PolyCollection
import matplotlib.patches as patches
import matplotlib.font_manager

# Enumerate the system's TrueType fonts once at import time.
fonts = matplotlib.font_manager.findSystemFonts(fontpaths=None, fontext='ttf')
fontnames = [
    matplotlib.font_manager.FontProperties(fname=fname).get_name()
    for fname in fonts
]
default_font = None
from trident.callbacks.callback_base import CallbackBase from trident.data.mask_common import label2color from trident.misc.ipython_utils import is_in_ipython, is_in_colab from trident.misc.visualization_utils import * from trident.data.bbox_common import * if get_backend() == 'pytorch': from trident.backend.pytorch_backend import try_map_args_and_call from trident.backend.pytorch_ops import to_numpy, to_tensor, arange, shuffle, cast, clip, sqrt, int_shape, argmax, softmax, any_abnormal_number, reduce_any elif get_backend() == 'tensorflow': from trident.backend.tensorflow_backend import try_map_args_and_call from trident.backend.tensorflow_ops import to_numpy, to_tensor, arange, shuffle, cast, clip, sqrt, int_shape, concate, zeros_like, ones_like, argmax, softmax, any_abnormal_number, \ not_equal,reduce_any if is_in_ipython() or is_in_colab(): from IPython import display _session = get_session() _backend = get_backend() __all__ = [ 'VisualizationCallbackBase', 'TileImageCallback', 'PrintGradientsCallback', 'SegTileImageCallback', 'PlotLossMetricsCallback', 'DetectionPlotImageCallback' ] class VisualizationCallbackBase(CallbackBase): def __init__(self, epoch_inteval,