def modify_doc(doc):
    """
    Contains the application, including all callbacks.
    TODO: could the callbacks be outsourced?
    :param doc:
    :type doc:
    """
    logger.debug('modify_doc has been called')

    def get_data_frames(ie):
        """
        Called once initially, and then every time the experiment number is changed by the slider.
        :param ie: experiment number
        :type ie: int
        :returns: dataframe from stelar datafile and dataframe with tau, phi and fitted values
        :rtype: list of 2 pandas dataframes
        """
        logger.debug('get_dataframe with ie={}'.format(ie))
        fid = polymer.getfid(ie)  # read FID or series of FIDs for the selected experiment
        try:
            tau = polymer.get_tau_axis(ie)  # numpy array containing the taus for experiment ie
            try:
                startpoint = fid_slider.range[0]  # lower integration bound
                endpoint = fid_slider.range[1]    # upper integration bound
            except NameError:
                # fid_slider not initialized for the first plot. Use default values:
                startpoint = int(0.05 * polymer.getparvalue(ie, 'BS'))
                endpoint = int(0.1 * polymer.getparvalue(ie, 'BS'))
                logger.debug('fid_slider not initialized for first plot. '
                             'Use default values {} and {}.'.format(startpoint, endpoint))
            # add the integration range to the parameters to make it accessible
            polymer.addparameter(ie, 'fid_range', (startpoint, endpoint))
            # list of averaged FID amplitudes (proportional to a magnetization phi)
            phi = get_mag_amplitude(fid, startpoint, endpoint,
                                    polymer.getparvalue(ie, 'NBLK'),
                                    polymer.getparvalue(ie, 'BS'))
            df = pd.DataFrame(data=np.c_[tau, phi], columns=['tau', 'phi'])  # DataFrames are nice
            # Normalize the magnetization.
            # Note: in the normalized magnetization the build-up curves and decay curves look alike,
            #       which makes fitting easier since everything looks like 1 * exp(-R/time) to first order.
            df['phi_normalized'] = (df['phi'] - df['phi'].iloc[0]) / (df['phi'].iloc[-1] - df['phi'].iloc[1])
            polymer.addparameter(ie, 'df_magnetization', df)  # make the magnetization dataframe accessible as a parameter
            fit_option = 2  # mono-exponential, 3-parameter fit
            p0 = [1.0, polymer.getparvalue(ie, 'T1MX')**-1 * 2, 0]  # start parameters for fitting an exponential decay
            df, popt = magnetization_fit(df, p0, fit_option)  # use leastsq to find optimal parameters
            polymer.addparameter(ie, 'popt(mono_exp)', popt)  # store fitting parameters for later access
            logger.info('fitfunction(t) = {} * exp(- {} * t) + {}'.format(*popt))  # log the fitting parameters for convenience
        except KeyError:
            logger.warning('no relaxation experiment found')
            tau = np.zeros(1)
            phi = np.zeros(1)
            df = pd.DataFrame(data=np.c_[tau, phi], columns=['tau', 'phi'])
            df['phi_normalized'] = np.zeros(1)
            df['fit_phi'] = np.zeros(1)
        return fid, df

    def calculate_mag_dec(attr, old, new, start_ie=None):
        '''
        Called from the callback for the experiment chooser:
        loads the selected experiment and visualizes it in plots p1 and p2.
        Gets the experiment number from the slider,
        writes source_fid.data from the FID of the polymer object and
        source_mag_dec.data from the magnetization dataframe.
        '''
        ie = experiment_slider.value  # get experiment number from the slider
        logger.debug('calculate mag_dec for ie={}'.format(ie))
        fid, df = get_data_frames(ie)
        source_fid.data = ColumnDataSource.from_df(fid)  # convert fid to bokeh format
        source_mag_dec.data = ColumnDataSource.from_df(df)

    def plot_par():
        '''
        Creates the plot for the parameters.
        Called with every update from the callback.
        '''
        logger.debug('creating plot for the parameters')

        # read data according to the selection of select_x/y
        xs = par_df[select_xaxis.value].values
        ys = par_df[select_yaxis.value].values

        # read titles according to the names of select_x/y
        x_title = select_xaxis.value.title()
        y_title = select_yaxis.value.title()

        # Remark: many attributes of a bokeh plot cannot be modified after initialization,
        # e.g. p4.x_axis_type='datetime' does not work. Keyword arguments are a
        # workaround to pass all optional arguments at creation time.
        kw = dict()  # optional keyword arguments for figure()
        if select_xaxis.value in discrete:
            kw['x_range'] = sorted(set(xs))
        if select_yaxis.value in discrete:
            kw['y_range'] = sorted(set(ys))
        if select_yaxis.value in time:
            kw['y_axis_type'] = 'datetime'
        if select_xaxis.value in time:
            kw['x_axis_type'] = 'datetime'
        kw['title'] = "%s vs %s" % (x_title, y_title)

        # create the figure using the optional keywords kw
        p4 = figure(plot_height=300, plot_width=600, tools='pan,box_zoom,reset', **kw)
        # set axis labels
        p4.xaxis.axis_label = x_title
        p4.yaxis.axis_label = y_title

        # strings at the x-axis ticks need a lot of space; rotate the label orientation
        if select_xaxis.value in discrete:
            p4.xaxis.major_label_orientation = pd.np.pi / 4  # rotates labels

        # standard size of symbols
        sz = 9
        # custom size of symbols according to select_size
        if select_size.value != 'None':
            groups = pd.qcut(pd.to_numeric(par_df[select_size.value].values), len(SIZES))
            sz = [SIZES[xx] for xx in groups.codes]

        # standard color
        c = "#31AADE"
        # custom color according to select_color
        if select_color.value != 'None':
            groups = pd.qcut(pd.to_numeric(par_df[select_color.value]).values, len(COLORS))
            c = [COLORS[xx] for xx in groups.codes]

        # create the plot using circles
        p4.circle(x=xs, y=ys, color=c, size=sz, line_color="white",
                  alpha=0.6, hover_color='white', hover_alpha=0.5)
        return p4  # return the plot

    def callback_update_plot_1(attr, old, new):
        ''' Callback for the update of figure 1 in the parameters tab '''
        tabs.tabs[1].child.children[1] = plot_par()
        print(tabs.tabs[1].child.children[1])
        logger.debug('Parameter plot updated')
        # p4 = plot_par()

    def callback_update_p3():
        logger.debug('update plot 3')
        p3 = fit_mag_decay_all(polymer, par_df)
        return p3

    def callback_update_experiment(attr, old, new):
        """ Callback for the experiment chooser """
        ie = experiment_slider.value
        logger.debug('Callback experiment update, ie={}'.format(ie))
        fid_slider.end = polymer.getparvalue(ie, 'BS')
        try:
            fid_slider.range = polymer.getparvalue(ie, 'fid_range')
        except Exception:
            startpoint = int(0.05 * polymer.getparvalue(ie, 'BS'))
            endpoint = int(0.1 * polymer.getparvalue(ie, 'BS'))
            fid_slider.range = (startpoint, endpoint)
        calculate_mag_dec(attr, old, new)

    def callback_load_more_data(attr, old, new):
        ''' Callback for loading of data '''
        # TODO: implement
        logger.debug('callback for loading of data')
        logger.error('Not implemented!')
        path = pathbox.value.strip()
        file = filebox.value.strip()
        if file == "*.sdf":
            logger.info('callback for loading data. filename: {}'.format(file))
            allsdf = filter(lambda x: x.endswith('.sdf'), os.listdir(path))
            for f in allsdf:
                sdf_list.append(sdf.StelarDataFile(f, path))
        else:
            sdf_list.append(sdf.StelarDataFile(file, path))
        filenames = [x.file() for x in sdf_list]
        filenames_df = pd.DataFrame(data=filenames, columns=['file'])
        table_source.data = ColumnDataSource.from_df(filenames_df)

    def callback_export_data(attr, old, new):
        logger.debug('callback_export_data has been called')
        logger.error('Not implemented!')
        pass

    def callback_write_table_to_file():
        # FIXME: not implemented yet (wired to export_button.on_click, so it takes no arguments)
        logger.debug('callback_write_table_to_file has been called')
        logger.error('Not implemented!')
        pass
        # path = export_text.value.strip()
        # exportdata = export_source.data
        # CustomJS(args=dict(source=export_source),
        #          code=open(join(dirname(__file__), "export_csv.js")).read())

    def callback_update_parameters():
        '''
        Callback for the button: called when the button is clicked.
        Updates the parameters of polymer, since they can change during evaluation.
        '''
        logger.debug('callback for button (update parameter).')
        par_df, columns, discrete, continuous, time, quantileable = polymer.scan_parameters()
        select_xaxis.options = columns
        select_yaxis.options = columns
        select_size.options = ['None'] + quantileable
        select_color.options = ['None'] + quantileable

    logger.info('Starting the script')
    ### This is the start of the script ###
    ### The callbacks are above ###

    # load data:
    # TODO: how to handle multiple datafiles?
    #       New tab for each datafile?
    #       Dropdown selection to choose datafile?
    #       Complete new start of process? (probably not preferred)
    polymer = load_data('glyzerin_d3_300K.sdf')
    nr_experiments = polymer.get_number_of_experiments()
    start_ie = 1  # initially set ie = 1
    par_df, columns, discrete, continuous, time, quantileable = polymer.scan_parameters(20)

    # for the initial call get the dataframes without a callback;
    # they are updated in the following callbacks
    fid, df = get_data_frames(start_ie)
    source_fid = ColumnDataSource(data=ColumnDataSource.from_df(fid))
    source_mag_dec = ColumnDataSource(data=ColumnDataSource.from_df(df))

    # initially create the plots p1 and p2
    p1, p2 = create_plot_1_and_2(source_fid, source_mag_dec)

    ### initiate widgets, which will call the callback on change ###
    # slider to choose the experiment by value
    experiment_slider = Slider(start=1, end=nr_experiments, value=1, step=1,
                               callback_policy='mouseup', width=800)

    # range slider for the interval of the FID from which the magnetization is calculated
    fid_slider = RangeSlider(start=1, end=polymer.getparvalue(start_ie, 'BS'),
                             range=polymer.getparvalue(start_ie, 'fid_range'),
                             step=1, callback_policy='mouseup', width=400)

    # fit the magnetization decay for all experiments
    p3 = fit_mag_decay_all(polymer, par_df)

    # refit the magnetization decay with updated ranges after a button push
    button_refit = Button(label='Update', button_type="success")
    button_refit.on_click(callback_update_p3)

    # initialize an empty source for the experiment slider;
    # 'data' is the attribute: a field in source, which is a ColumnDataSource
    source = ColumnDataSource(data=dict(value=[]))
    # register callback_update_experiment as the callback
    source.on_change('data', callback_update_experiment)  # source for experiment_slider
    experiment_slider.callback = CustomJS(args=dict(source=source), code="""
        source.data = { value: [cb_obj.value] }
    """)  # unfortunately this CustomJS is needed to throttle the callback in the current version of bokeh

    # initialize an empty source for the fid slider, same as above
    source2 = ColumnDataSource(data=dict(range=[], ie=[]))
    source2.on_change('data', calculate_mag_dec)
    fid_slider.callback = CustomJS(args=dict(source=source2), code="""
        source.data = { range: cb_obj.range }
    """)  # unfortunately this CustomJS is needed to throttle the callback in the current version of bokeh

    # same for the update button
    button_scan = Button(label='Scan Parameters', button_type="success")
    button_scan.on_click(callback_update_parameters)

    # here come the callbacks for x, y, size, color
    select_xaxis = Select(title='X-Axis', value='ZONE', options=columns)
    select_xaxis.on_change('value', callback_update_plot_1)
    select_yaxis = Select(title='Y-Axis', value='TIME', options=columns)
    select_yaxis.on_change('value', callback_update_plot_1)
    select_size = Select(title='Size', value='None', options=['None'] + quantileable)
    select_size.on_change('value', callback_update_plot_1)
    select_color = Select(title='Color', value='None', options=['None'] + quantileable)
    select_color.on_change('value', callback_update_plot_1)

    controls_p4 = widgetbox([button_scan, select_xaxis, select_yaxis, select_color, select_size], width=150)
    # p4 = plot_par()
    layout_p4 = row(controls_p4, plot_par())
    logger.debug('layout for parameter plot created')

    ####
    #### TODO: write file input
    #### TODO: select files to import
    #### TODO: discard imported files
    ####
    # load more data:
    table_source = ColumnDataSource(data=dict())
    sdf_list = [polymer]  # TODO: current plan is to save the different dataframes in a list, right?
    filenames = [x.file() for x in sdf_list]
    files_df = pd.DataFrame(data=filenames, columns=['file'])
    table_source.data = ColumnDataSource.from_df(files_df)
    t_columns = [
        TableColumn(field='file', title='Path / Filename'),
        # TableColumn(field='file', title='Filename'),
    ]
    table = DataTable(source=table_source, columns=t_columns)
    pathbox = TextInput(title="Path", value=os.path.curdir)
    filebox = TextInput(title="Filename", value="*.sdf")
    pathbox.on_change('value', callback_load_more_data)
    filebox.on_change('value', callback_load_more_data)
    layout_input = column(pathbox, filebox, table)

    # Data out: export data from figures & export parameters
    export_source = ColumnDataSource(data=dict())
    export_columns = []
    output_table = DataTable(source=export_source, columns=export_columns)
    export_slider = Slider(start=1, end=4, value=3, step=1,
                           callback_policy='mouseup', width=200)  # do we need mouseup on this?
    export_slider.on_change('value', callback_export_data)
    export_text = TextInput(title="Path", value=os.path.curdir)
    export_button = Button(label='Export to csv', button_type="success")
    # FIXME: callback doesn't work yet
    export_button.on_click(callback_write_table_to_file)
    layout_output = row(column(export_slider, export_text, export_button), output_table)
    print('after layout_output')

    # set the layout of the tabs
    layout_p1 = column(experiment_slider,
                       p1,
                       row(column(fid_slider, p2),
                           column(button_refit, p3)))
    tab_relaxation = Panel(child=layout_p1, title='Relaxation')
    tab_parameters = Panel(child=layout_p4, title='Parameters')
    tab_input = Panel(child=layout_input, title='Data In')
    tab_output = Panel(child=layout_output, title='Data Out')

    # initialize the tabs object with the four tabs
    tabs = Tabs(tabs=[tab_relaxation, tab_parameters, tab_input, tab_output])
    print('tabs')

    doc.add_root(tabs)
    doc.add_root(source)   # source must be added to the document so that its changes are detected
    doc.add_root(source2)
    print('tab tab')
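
# --- Editor's addition: a minimal, hedged sketch of how a modify_doc-style function can be
# --- wired into a Bokeh server programmatically (assuming roughly the same 0.12.x-era Bokeh
# --- used above; the route and port are placeholders). The simpler alternative is to run the
# --- script under `bokeh serve --show myscript.py` and call modify_doc(curdoc()) at module level.
from bokeh.application import Application
from bokeh.application.handlers import FunctionHandler
from bokeh.server.server import Server

app = Application(FunctionHandler(modify_doc))  # wrap the document-building function
server = Server({'/': app}, port=5006)          # serve it at http://localhost:5006/
server.start()
server.io_loop.add_callback(server.show, '/')   # open a browser tab pointing at the app
server.io_loop.start()                          # block until the server is stopped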
######################################
# SET UP ALL THE WIDGETS AND CALLBACKS
######################################
source = ColumnDataSource(data=dict(value=[]))
source.on_change('data', update_data)

exptime = Slider(title="Integration Time (hours)", value=24., start=1., end=1000.0, step=1.0,
                 callback_policy='mouseup')
exptime.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

magnitude = Slider(title="V Band Magnitude of Object", value=10., start=-10, end=40., step=1.0,
                   callback_policy='mouseup')
magnitude.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

diameter = Slider(title="Mirror Diameter (meters)", value=10.0, start=0.5, end=50.,
spec = Cratio + np.random.randn(len(Cratio)) * sig
planet.data = dict(lam=lam, cratio=Cratio * 1e9, spec=spec * 1e9,
                   downerr=(spec - sig) * 1e9, uperr=(spec + sig) * 1e9)
format_button_group.active = None

######################################
# SET UP ALL THE WIDGETS AND CALLBACKS
######################################
source = ColumnDataSource(data=dict(value=[]))
source.on_change('data', update_data)

exptime = Slider(title="Integration Time (hours)", value=20., start=10., end=100.0, step=1.0,
                 callback_policy='mouseup')
exptime.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

distance = Slider(title="Distance (parsec)", value=10., start=2., end=50.0, step=1.0,
                  callback_policy='mouseup')
distance.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

radius = Slider(title="Planet Radius (R_Earth)", value=1.0, start=0.5, end=3., step=0.1,
                callback_policy='mouseup')
radius.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

semimajor = Slider(title="Semi-major axis of orbit (AU)", value=1.0, start=0.2, end=2., step=0.1,
                   callback_policy='mouseup')
semimajor.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

exozodi = Slider(title="Number of Exozodi", value=1.0, start=1.0, end=10., step=1.,
                 callback_policy='mouseup')
exozodi.callback = CustomJS(args=dict(source=source), code="""
def generate_gui(tsne, cut_extracellular_data, all_extra_spike_times, time_axis, cluster_info_file,
                 use_existing_cluster, autocor_bin_number, sampling_freq, prb_file=None, k4=False,
                 verbose=False):
    if k4:
        tsne_figure_size = [1000, 800]
        tsne_min_border_left = 50
        spike_figure_size = [500, 500]
        hist_figure_size = [500, 500]
        heatmap_plot_size = [200, 800]
        clusters_table_size = [400, 300]
        layout_size = [1500, 1400]
        slider_size = [300, 100]
        user_info_size = [700, 80]
    else:
        tsne_figure_size = [850, 600]
        tsne_min_border_left = 10
        spike_figure_size = [450, 300]
        hist_figure_size = [450, 300]
        heatmap_plot_size = [200, 800]
        clusters_table_size = [400, 400]
        layout_size = [1200, 800]
        slider_size = [270, 80]
        user_info_size = [450, 80]

    # Plots ------------------------------
    # scatter plot
    global non_selected_points_alpha
    global selected_points_size
    global non_selected_points_size
    global update_old_selected_switch
    global previously_selected_spike_indices

    tsne_fig_tools = "pan,wheel_zoom,box_zoom,box_select,lasso_select,tap,resize,reset,save"
    tsne_figure = figure(tools=tsne_fig_tools, plot_width=tsne_figure_size[0],
                         plot_height=tsne_figure_size[1], title='T-sne', min_border=10,
                         min_border_left=tsne_min_border_left, webgl=True)

    tsne_source = ColumnDataSource({'tsne-x': tsne[0], 'tsne-y': tsne[1]})
    tsne_selected_points_glyph = Circle(x='tsne-x', y='tsne-y', size=selected_points_size,
                                        line_alpha=0, fill_alpha=1, fill_color='red')
    tsne_nonselected_points_glyph = Circle(x='tsne-x', y='tsne-y', size=non_selected_points_size,
                                           line_alpha=0, fill_alpha=non_selected_points_alpha,
                                           fill_color='blue')
    tsne_invisible_points_glyph = Circle(x='tsne-x', y='tsne-y', size=selected_points_size,
                                         line_alpha=0, fill_alpha=0)
    tsne_nonselected_glyph_renderer = tsne_figure.add_glyph(tsne_source, tsne_nonselected_points_glyph,
                                                            selection_glyph=tsne_invisible_points_glyph,
                                                            nonselection_glyph=tsne_nonselected_points_glyph,
                                                            name='tsne_nonselected_glyph_renderer')
    # note: the invisible glyph is required to be able to change the size of the selected points,
    # since selection_glyph is useful only for colors and alphas
    tsne_invinsible_glyph_renderer = tsne_figure.add_glyph(tsne_source, tsne_invisible_points_glyph,
                                                           selection_glyph=tsne_selected_points_glyph,
                                                           nonselection_glyph=tsne_invisible_points_glyph,
                                                           name='tsne_invinsible_glyph_renderer')
    tsne_figure.select(BoxSelectTool).select_every_mousemove = False
    tsne_figure.select(LassoSelectTool).select_every_mousemove = False

    def on_tsne_data_update(attr, old, new):
        global previously_selected_spike_indices
        global currently_selected_spike_indices
        global non_selected_points_alpha
        global non_selected_points_size
        global selected_points_size
        global checkbox_find_clusters_of_selected_points

        previously_selected_spike_indices = np.array(old['1d']['indices'])
        currently_selected_spike_indices = np.array(new['1d']['indices'])
        num_of_selected_spikes = len(currently_selected_spike_indices)

        if num_of_selected_spikes > 0:
            if verbose:
                print('Num of selected spikes = ' + str(num_of_selected_spikes))

            # update t-sne plot
            tsne_invisible_points_glyph.size = selected_points_size
            tsne_nonselected_points_glyph.size = non_selected_points_size
            tsne_nonselected_points_glyph.fill_alpha = non_selected_points_alpha

            # update spike plot
            avg_x = np.mean(cut_extracellular_data[:, :, currently_selected_spike_indices], axis=2)
            spike_mline_plot.data_source.data['ys'] = avg_x.tolist()
            print('Finished avg spike plot')

            # update autocorrelogram
            diffs, norm = crosscorrelate_spike_trains(
                all_extra_spike_times[currently_selected_spike_indices].astype(np.int64),
                all_extra_spike_times[currently_selected_spike_indices].astype(np.int64),
                lag=1500)
            hist, edges = np.histogram(diffs, bins=autocor_bin_number)
            hist_plot.data_source.data["top"] = hist
            hist_plot.data_source.data["left"] = edges[:-1] / sampling_freq
            hist_plot.data_source.data["right"] = edges[1:] / sampling_freq
            print('finished autocorrelogram')

            # update heatmap
            if prb_file is not None:
                print('Doing heatmap')
                data = cut_extracellular_data[:, :, currently_selected_spike_indices]
                final_image, (x_size, y_size) = spike_heatmap.create_heatmap(data, prb_file, rotate_90=True,
                                                                             flip_ud=True, flip_lr=False)
                new_image_data = dict(image=[final_image], x=[0], y=[0], dw=[x_size], dh=[y_size])
                heatmap_data_source.data.update(new_image_data)
                print('Finished heatmap')

    tsne_source.on_change('selected', on_tsne_data_update)

    # spike plot
    spike_fig_tools = 'pan,wheel_zoom,box_zoom,reset,save'
    spike_figure = figure(toolbar_location='below', plot_width=spike_figure_size[0],
                          plot_height=spike_figure_size[1], tools=spike_fig_tools,
                          title='Spike average', min_border=10, webgl=True, toolbar_sticky=False)
    num_of_channels = cut_extracellular_data.shape[0]
    num_of_time_points = cut_extracellular_data.shape[1]
    xs = np.repeat(np.expand_dims(time_axis, axis=0), repeats=num_of_channels, axis=0).tolist()
    ys = np.ones((num_of_channels, num_of_time_points)).tolist()
    spike_mline_plot = spike_figure.multi_line(xs=xs, ys=ys)

    # autocorrelogram plot
    hist, edges = np.histogram([], bins=autocor_bin_number)
    hist_fig_tools = 'pan,wheel_zoom,box_zoom,save,reset'
    hist_figure = figure(toolbar_location='below', plot_width=hist_figure_size[0],
                         plot_height=hist_figure_size[1], tools=hist_fig_tools,
                         title='Autocorrelogram', min_border=10, webgl=True, toolbar_sticky=False)
    hist_plot = hist_figure.quad(bottom=0, left=edges[:-1], right=edges[1:], top=hist,
                                 color="#3A5785", alpha=0.5, line_color="#3A5785")

    # heatmap plot
    heatmap_plot = figure(toolbar_location='right', plot_width=1, plot_height=heatmap_plot_size[1],
                          x_range=(0, 1), y_range=(0, 1), title='Probe heatmap', toolbar_sticky=False)
    if prb_file is not None:
        data = np.zeros(cut_extracellular_data.shape)
        final_image, (x_size, y_size) = spike_heatmap.create_heatmap(data, prb_file, rotate_90=True,
                                                                     flip_ud=True, flip_lr=False)
        final_image[:, :, ] = 4294967295  # the uint32 with all four uint8 channels at 255 (white)
        plot_width = max(heatmap_plot_size[0], int(heatmap_plot_size[1] * y_size / x_size))
        heatmap_plot = figure(toolbar_location='right', plot_width=plot_width,
                              plot_height=heatmap_plot_size[1], x_range=(0, x_size), y_range=(0, y_size),
                              title='Probe heatmap', toolbar_sticky=False)
        heatmap_data_source = ColumnDataSource(data=dict(image=[final_image], x=[0], y=[0],
                                                         dw=[x_size], dh=[y_size]))
        heatmap_renderer = heatmap_plot.image_rgba(source=heatmap_data_source, image='image',
                                                   x='x', y='y', dw='dw', dh='dh', dilate=False)
    heatmap_plot.axis.visible = None
    heatmap_plot.xgrid.grid_line_color = None
    heatmap_plot.ygrid.grid_line_color = None

    # ---------------------------------------
    # --------------- CONTROLS --------------
    # Texts and Tables
    # the clusters DataTable
    if use_existing_cluster:
        cluster_info = load_cluster_info(cluster_info_file)
    else:
        cluster_info = create_new_cluster_info_file(cluster_info_file, len(tsne))
    cluster_info_data_source = ColumnDataSource(cluster_info)
    clusters_columns = [TableColumn(field='Cluster', title='Clusters'),
                        TableColumn(field='Num_of_Spikes', title='Number of Spikes')]
    clusters_table = DataTable(source=cluster_info_data_source, columns=clusters_columns,
                               selectable=True, editable=False, width=clusters_table_size[0],
                               height=clusters_table_size[1], scroll_to_selection=True)

    def on_select_cluster_info_table(attr, old, new):
        global selected_cluster_names
        cluster_info = load_cluster_info(cluster_info_file)
        indices = list(chain.from_iterable(cluster_info.iloc[new['1d']['indices']].Spike_Indices.tolist()))
        selected_cluster_names = list(cluster_info.index[new['1d']['indices']])
        old = new = tsne_source.selected
        tsne_source.selected['1d']['indices'] = indices
        tsne_source.trigger('selected', old, new)
        user_info_edit.value = 'Selected clusters = ' + ', '.join(selected_cluster_names)

    cluster_info_data_source.on_change('selected', on_select_cluster_info_table)

    def update_data_table():
        cluster_info_data_source = ColumnDataSource(load_cluster_info(cluster_info_file))
        cluster_info_data_source.on_change('selected', on_select_cluster_info_table)
        clusters_table.source = cluster_info_data_source
        options = list(cluster_info_data_source.data['Cluster'])
        options.insert(0, 'No cluster selected')
        select_cluster_to_move_points_to.options = options

    # cluster TextBox that adds a cluster to the DataTable
    new_cluster_name_edit = TextInput(value='give the new cluster a name',
                                      title='Put selected points into a new cluster')

    def on_text_edit_new_cluster_name(attr, old, new):
        global currently_selected_spike_indices
        global clusters_of_all_spikes

        new_cluster_name = new_cluster_name_edit.value
        spike_indices_to_delete_from_existing_clusters = {}
        for spike_index in currently_selected_spike_indices:
            if clusters_of_all_spikes[spike_index] != -1:
                cluster_index = clusters_of_all_spikes[spike_index]
                if cluster_index not in spike_indices_to_delete_from_existing_clusters:
                    spike_indices_to_delete_from_existing_clusters[cluster_index] = [spike_index]
                else:
                    spike_indices_to_delete_from_existing_clusters[cluster_index].append(spike_index)
        cluster_info = load_cluster_info(cluster_info_file)
        for cluster_index in spike_indices_to_delete_from_existing_clusters.keys():
            cluster_name = cluster_info.iloc[cluster_index].name
            remove_spikes_from_cluster(cluster_info_file, cluster_name,
                                       spike_indices_to_delete_from_existing_clusters[cluster_index],
                                       unassign=False)
        add_cluster_to_cluster_info(cluster_info_file, new_cluster_name, currently_selected_spike_indices)
        update_data_table()

    new_cluster_name_edit.on_change('value', on_text_edit_new_cluster_name)

    # user information Text
    user_info_edit = TextInput(value='', title='User information', width=user_info_size[0],
                               height=user_info_size[1])

    # Buttons ------------------------
    # show all clusters Button
    button_show_all_clusters = Toggle(label='Show all clusters', button_type='primary')

    def on_button_show_all_clusters(state, *args):
        global tsne_clusters_scatter_plot
        if state:
            cluster_info = load_cluster_info(cluster_info_file)
            num_of_clusters = cluster_info.shape[0]
            indices_list_of_lists = cluster_info['Spike_Indices'].tolist()
            indices = [item for sublist in indices_list_of_lists for item in sublist]
            cluster_indices = np.arange(num_of_clusters)
            if verbose:
                print('Showing all clusters in colors... wait for it...')
            colors = []
            for c in cluster_indices:
                r = np.random.random(size=1) * 255
                g = np.random.random(size=1) * 255
                for i in np.arange(len(indices_list_of_lists[c])):
                    colors.append("#%02x%02x%02x" % (int(r), int(g), 50))
            first_time = True
            for renderer in tsne_figure.renderers:
                if renderer.name == 'tsne_all_clusters_glyph_renderer':
                    renderer.data_source.data['fill_color'] = renderer.data_source.data['line_color'] = colors
                    renderer.glyph.fill_color = 'fill_color'
                    renderer.glyph.line_color = 'line_color'
                    first_time = False
                    break
            if first_time:
                tsne_clusters_scatter_plot = tsne_figure.scatter(tsne[0][indices], tsne[1][indices],
                                                                 size=1, color=colors, alpha=1,
                                                                 name='tsne_all_clusters_glyph_renderer')
            tsne_clusters_scatter_plot.visible = True
            button_show_all_clusters.label = 'Hide all clusters'
        else:
            if verbose:
                print('Hiding clusters')
            button_show_all_clusters.update()
            tsne_clusters_scatter_plot.visible = False
            button_show_all_clusters.label = 'Show all clusters'

    button_show_all_clusters.on_click(on_button_show_all_clusters)

    # Button to select the clusters that the selected points belong to
    # (this then drives the selection of these spikes on t-sne through the update of the clusters_table source)
    button_show_clusters_of_selected_points = Button(label='Show clusters of selected points')

    def on_button_show_clusters_change():
        print('Hello')
        global clusters_of_all_spikes
        currently_selected_spike_indices = tsne_source.selected['1d']['indices']
        cluster_info = load_cluster_info(cluster_info_file)
        clusters_selected = []
        new_indices_to_select = []
        update_data_table()
        for spike_index in currently_selected_spike_indices:
            if clusters_of_all_spikes[spike_index] not in clusters_selected:
                clusters_selected.append(clusters_of_all_spikes[spike_index])
                indices_in_cluster = cluster_info.iloc[clusters_of_all_spikes[spike_index]].Spike_Indices
                new_indices_to_select.append(indices_in_cluster)
        if len(new_indices_to_select) > 0:
            old = clusters_table.source.selected
            clusters_table.source.selected['1d']['indices'] = clusters_selected
            new = clusters_table.source.selected
            clusters_table.source.trigger('selected', old, new)
        for c in np.arange(len(clusters_selected)):
            clusters_selected[c] = cluster_info.index[clusters_selected[c]]

    button_show_clusters_of_selected_points.on_click(on_button_show_clusters_change)

    # merge clusters Button
    button_merge_clusters_of_selected_points = Button(label='Merge clusters of selected points')

    def on_button_merge_clusters_change():
        global clusters_of_all_spikes
        currently_selected_spike_indices = tsne_source.selected['1d']['indices']
        cluster_info = load_cluster_info(cluster_info_file)
        clusters_selected = []
        for spike_index in currently_selected_spike_indices:
            if clusters_of_all_spikes[spike_index] not in clusters_selected:
                clusters_selected.append(clusters_of_all_spikes[spike_index])
        if len(clusters_selected) > 0:
            clusters_selected = np.sort(clusters_selected)
            clusters_selected_names = []
            for cluster_index in clusters_selected:
                clusters_selected_names.append(cluster_info.iloc[cluster_index].name)
            cluster_name = clusters_selected_names[0]
            add_cluster_to_cluster_info(cluster_info_file, cluster_name, currently_selected_spike_indices)
            i = 0
            for c in np.arange(1, len(clusters_selected)):
                cluster_info = remove_cluster_from_cluster_info(
                    cluster_info_file, cluster_info.iloc[clusters_selected[c] - i].name, unassign=False)
                # every time a cluster is removed, the original index of the remaining clusters drops by one
                i = i + 1
            update_data_table()
            user_info_edit.value = ('Clusters ' + ', '.join(clusters_selected_names) +
                                    ' merged to cluster ' + cluster_name)

    button_merge_clusters_of_selected_points.on_click(on_button_merge_clusters_change)

    # delete cluster Button
    button_delete_cluster = Button(label='Delete selected cluster(s)')

    def on_button_delete_cluster():
        global selected_cluster_names
        for cluster_name in selected_cluster_names:
            remove_cluster_from_cluster_info(cluster_info_file, cluster_name)
        user_info_edit.value = 'Deleted clusters: ' + ', '.join(selected_cluster_names)
        update_data_table()

    button_delete_cluster.on_click(on_button_delete_cluster)

    # Select widget to move selected points to a cluster
    select_cluster_to_move_points_to = Select(title="Assign selected points to cluster:",
                                              value="No cluster selected")
    options = list(cluster_info_data_source.data['Cluster'])
    options.insert(0, 'No cluster selected')
    select_cluster_to_move_points_to.options = options

    def move_selected_points_to_cluster(attr, old, new):
        global currently_selected_spike_indices
        if len(currently_selected_spike_indices) > 0 and new != 'No cluster selected':
            remove_spikes_from_all_clusters(cluster_info_file, currently_selected_spike_indices)
            add_spikes_to_cluster(cluster_info_file, new, currently_selected_spike_indices)
            update_data_table()
            select_cluster_to_move_points_to.value = 'No cluster selected'
            user_info_edit.value = 'Selected clusters = ' + new

    select_cluster_to_move_points_to.on_change('value', move_selected_points_to_cluster)

    # undo selection button
    undo_selected_points_button = Button(label='Undo last selection')

    def on_button_undo_selection():
        global previously_selected_spike_indices
        tsne_source.selected['1d']['indices'] = previously_selected_spike_indices
        old = new = tsne_source.selected
        tsne_source.trigger('selected', old, new)

    undo_selected_points_button.on_click(on_button_undo_selection)

    # Sliders -------------------
    # use the fake-data trick to call the callback only when the mouse is released
    # (mouseup only works for CustomJS)

    # Slider: change visibility of non-selected points
    slider_non_selected_visibility = Slider(start=0, end=1, value=0.2, step=.02,
                                            callback_policy='mouseup',
                                            title='Alpha of not selected points',
                                            width=slider_size[0], height=slider_size[1])

    def on_slider_change_non_selected_visibility(attrname, old, new):
        global non_selected_points_alpha
        if len(source_fake_nsv.data['value']) > 0:
            non_selected_points_alpha = source_fake_nsv.data['value'][0]
            old = new = tsne_source.selected
            tsne_source.trigger('selected', old, new)

    source_fake_nsv = ColumnDataSource(data=dict(value=[]))
    source_fake_nsv.on_change('data', on_slider_change_non_selected_visibility)
    slider_non_selected_visibility.callback = CustomJS(args=dict(source=source_fake_nsv), code="""
        source.data = { value: [cb_obj.value] }
    """)

    # Slider: change size of non-selected points
    slider_non_selected_size = Slider(start=0.5, end=10, value=2, step=0.5, callback_policy='mouseup',
                                      title='Size of not selected points',
                                      width=slider_size[0], height=slider_size[1])

    def on_slider_change_non_selected_size(attrname, old, new):
        global non_selected_points_size
        if len(source_fake_nss.data['value']) > 0:
            non_selected_points_size = source_fake_nss.data['value'][0]
            old = new = tsne_source.selected
            tsne_source.trigger('selected', old, new)

    source_fake_nss = ColumnDataSource(data=dict(value=[]))
    source_fake_nss.on_change('data', on_slider_change_non_selected_size)
    slider_non_selected_size.callback = CustomJS(args=dict(source=source_fake_nss), code="""
        source.data = { value: [cb_obj.value] }
    """)

    # Slider: change size of selected points
    slider_selected_size = Slider(start=0.5, end=10, value=2, step=0.5, callback_policy='mouseup',
                                  title='Size of selected points',
                                  width=slider_size[0], height=slider_size[1])

    def on_slider_change_selected_size(attrname, old, new):
        global selected_points_size
        if len(source_fake_ss.data['value']) > 0:
            selected_points_size = source_fake_ss.data['value'][0]
            old = new = tsne_source.selected
            tsne_source.trigger('selected', old, new)

    source_fake_ss = ColumnDataSource(data=dict(value=[]))
    source_fake_ss.on_change('data', on_slider_change_selected_size)
    slider_selected_size.callback = CustomJS(args=dict(source=source_fake_ss), code="""
        source.data = { value: [cb_obj.value] }
    """)

    # -------------------------------------------
    # Layout and session setup ------------------
    # align and make layout
    spike_figure.min_border_top = 50
    spike_figure.min_border_right = 10
    hist_figure.min_border_top = 50
    hist_figure.min_border_left = 10
    tsne_figure.min_border_right = 50

    if k4:
        lay = row(column(tsne_figure,
                         row(slider_non_selected_visibility, slider_non_selected_size, slider_selected_size),
                         row(spike_figure, hist_figure),
                         user_info_edit),
                  column(clusters_table, button_show_clusters_of_selected_points,
                         button_merge_clusters_of_selected_points, button_delete_cluster,
                         select_cluster_to_move_points_to, new_cluster_name_edit,
                         button_show_all_clusters, undo_selected_points_button, heatmap_plot))
    else:
        lay = row(column(tsne_figure, row(spike_figure, hist_figure)),
                  column(row(heatmap_plot,
                             column(slider_non_selected_visibility, slider_non_selected_size,
                                    slider_selected_size)),
                         user_info_edit),
                  column(clusters_table, button_show_clusters_of_selected_points,
                         button_merge_clusters_of_selected_points, button_delete_cluster,
                         select_cluster_to_move_points_to, new_cluster_name_edit,
                         button_show_all_clusters, undo_selected_points_button))

    session = push_session(curdoc())
    session.show(lay)  # open the document in a browser
    session.loop_until_closed()  # run forever; requires stopping the interpreter in order to stop :)
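
# --- Editor's note (hedged): push_session(curdoc()) and session.loop_until_closed() assume a
# --- Bokeh server is already running at the default address (localhost:5006), e.g. started
# --- separately with `bokeh serve`. The alternative is to run this script itself under
# --- `bokeh serve --show <script>.py` and replace the push_session block with curdoc().add_root(lay).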
# WANT TO DO THESE CUTS IN THE SUBROUTINE
# spectrum_template.data['flux_cut'] = (spectrum_template.data['f'])
# spectrum_template.data['flux_cut'][np.where(np.array(spectrum_template.data['w']) < 1200.)] = -999.
# spectrum_template.data['flux_cut'][np.where(np.array(spectrum_template.data['w']) > 1700.)] = -999.

# fake source for managing callbacks
source = ColumnDataSource(data=dict(value=[]))
source.on_change('data', update_data)

# Set up widgets and their callbacks
# (faking the mouseup policy via "source" because a functional callback doesn't support it)
template = Select(title="Template Spectrum", value="QSO",
                  options=["QSO", "10 Myr Starburst", "O5V Star", "G2V Star", "Classical T Tauri",
                           "M1 Dwarf", "Orion Nebula", "Starburst, No Dust", "Starburst, E(B-V) = 0.6"])

redshift = Slider(title="Redshift", value=0.0, start=0., end=1.0, step=0.02, callback_policy='mouseup')
redshift.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

magnitude = Slider(title="Magnitude [AB]", value=21., start=15., end=25.0, step=0.1,
                   callback_policy='mouseup')
magnitude.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

grating = Select(title="Grating / Setting", value="G150M (R = 30,000)",
                 options=["G120M (R = 30,000)", "G150M (R = 30,000)", "G180M (R = 30,000)",
                          "G155L (R = 5,000)", "G145LL (R = 500)"])

aperture = Slider(title="Aperture (meters)", value=12., start=2., end=20.0, step=1.0,
                  callback_policy='mouseup')
aperture.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

exptime = Slider(title="Exposure Time [hr]", value=1.0, start=0.1, end=10.0, step=0.1,
                 callback_policy='mouseup')
exptime.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")
life_points.data['r'] = np.array(life_points.data['alpha']) * 0. + (0.3 * i + 0.5)

# Set up widgets with "fake" callbacks
fake_callback_source1 = ColumnDataSource(data=dict(value=[]))
fake_callback_source1.on_change('data', update_data)

aperture = Slider(title="Aperture (meters)", value=12., start=4., end=20.0, step=4.0,
                  callback_policy='mouseup', width=400)
aperture.callback = CustomJS(args=dict(source=fake_callback_source1), code="""
    source.data = { value: [cb_obj.value] }
""")

contrast = Slider(title="Log (Contrast)", value=-10, start=-11.0, end=-9, step=1.0,
                  callback_policy='mouseup', width=400)
contrast.callback = CustomJS(args=dict(source=fake_callback_source1), code="""
    source.data = { value: [cb_obj.value] }
""")

iwa = Slider(title="Inner Working Angle (l/D)", value=1.5, start=1.5,
            link.style.visibility = 'hidden';
            link.dispatchEvent(new MouseEvent('click'));
        }
    } else {
        alert('No data selected!');
    }
""")

data_freq.on_change('data', data_freq_sld_callback)
data_freq_sld.on_change('value', data_freq_on_title)
data_freq_sld.callback = CustomJS(args=dict(data_freq=data_freq, conn_status=conn_status), code="""
    if(!conn_status.value) {
        console.log("This application must be connected to the ESP device to change its acquisition frequency.");
    }
    data_freq.data = { value: [cb_obj.value] }
""")

##############################################################################
# Periodic streaming update call
def periodic_stream():
    global new_data
    if new_data:
        util.doc_next_tick(doc,
    // Emit data source for plot to be updated
    source.change.emit();
"""

# Done! Not too bad. Now let's define the arguments for the callback function.

# Define arguments for the JavaScript callback function
cb_args = {'source': source, 'sSlider': s_select, 'xoSlider': x_init}

# Assign arguments to the function
cb = CustomJS(args=cb_args, code=cb_script)

# Now we must assign this callback function to each of the sliders. What this means is that we must
# indicate that every time a slider value is changed, the `JavaScript` callback function must be executed.

# Assign the callback function to the widgets
x_init.callback = cb
s_select.callback = cb
x_init.js_on_change('value', cb)
s_select.js_on_change('value', cb)

# Alright. Now everything is set up for our interactive plot! We just need to define the bokeh plot.

# Define bokeh axis
x_allele_ax = bokeh.plotting.figure(width=300, height=275, x_axis_label='time (a.u.)',
                                    y_axis_label='allele frequency', y_range=[-0.05, 1.05])

# Populate the plot with our line coming from the Data Source
x_allele_ax.line(x='time', y='x_allele', line_width=2, source=source)
                              s=float(slider_smooth.value))  # t=x_knots,
    source_spline.data = dict(x=x, y=splev(x, tck))
    p.line('x', 'y', color="red", source=source_spline)
    source_knots.data = dict(x=tck[0])
    # cs = CubicSpline(sorted_x, sorted_y, axis=0, bc_type='natural', extrapolate=None)
    # source_spline = ColumnDataSource(data=dict(x=x, y=cs(x)))

# This way the slider triggers only when the mouse is released.
# Otherwise, at higher curve orders with many points it could get slow.
# source_dummy is just used to trigger the callback to update_source_spline
source_dummy = ColumnDataSource(data=dict(value=[]))
source_dummy.on_change('data', update_source_spline)
# slider_smooth.on_change('value', update_source_spline)
slider_smooth.callback = CustomJS(args=dict(source_dummy=source_dummy), code="""
    source_dummy.data = { value: [cb_obj.value] }
""")


def display_event():
    return CustomJS(args=dict(p=p, source_datapoints_JS=source_datapoints), code="""
        function distance(p1, p2) {
            return Math.sqrt( Math.pow(p1[0]-p2[0], 2) + Math.pow(p1[1]-p2[1], 2) );
        }

        var x_scale = p.inner_width / (p.x_range.end - p.x_range.start)
        var y_scale = p.inner_height / (p.y_range.end - p.y_range.start)

        var CIRCLE_RADIUS = %s;
def plot_datatable(df):
    df = df.copy()
    # deal with some atomic mass values of the form '[98]'
    df['atomic mass'] = df['atomic mass'].str.extract('([\d\.]+)').astype(float)

    columns = [
        TableColumn(field='atomic number', title='Atomic Number'),
        TableColumn(field='symbol', title='Symbol'),
        TableColumn(field='name', title='Name'),
        TableColumn(field='metal', title='Type'),
        TableColumn(field='atomic mass', title='Atomic Mass')
    ]
    column_names = [tc.field for tc in columns]
    source = ColumnDataSource(df[column_names])
    original_source = ColumnDataSource(df)
    data_table = DataTable(source=source, columns=columns, height=600, editable=False)

    widget_callback_code = """
    var filtered_data = filtered_source.get('data');
    var original_data = original_source.get('data');
    var element_type = element_type_select.get('value');
    var min_mass = min_mass_slider.get('value');

    // now construct the new data object based on the filtered values
    for (var key in original_data) {
        filtered_data[key] = [];
        for (var i = 0; i < original_data[key].length; ++i) {
            if ((element_type === "ALL" || original_data["metal"][i] === element_type) &&
                (original_data['atomic mass'][i] >= min_mass)) {
                filtered_data[key].push(original_data[key][i]);
            }
        }
    }
    target_obj.trigger('change');
    filtered_source.trigger('change');
    """

    # define the filter widgets, without callbacks for now
    element_type_list = ['ALL'] + df['metal'].unique().tolist()
    element_type_select = Select(title="Element Type:", value=element_type_list[0],
                                 options=element_type_list)
    min_mass_slider = Slider(start=0, end=df['atomic mass'].max(), value=1, step=1,
                             title="minimum atomic mass")

    # now define the callback objects, now that the filter widgets exist
    arg_dct = dict(filtered_source=source, original_source=original_source,
                   element_type_select=element_type_select, min_mass_slider=min_mass_slider,
                   target_obj=data_table)
    generic_callback = CustomJS(args=arg_dct, code=widget_callback_code)

    # connect the callbacks to the filter widgets
    element_type_select.callback = generic_callback
    min_mass_slider.callback = generic_callback

    # create a button to collect the filtered results;
    # for now, just send json to a new window
    send_button_callback_code = """
    var filtered_data = filtered_source.get('data');
    var action_items = [];
    for (var i = 0; i < filtered_data['atomic number'].length; ++i) {
        var item = new Object();
        for (var key in filtered_data) {
            item[key] = filtered_data[key][i]
        }
        action_items.push(item);
    }

    var new_window = window.open("data:text/html," + encodeURIComponent(JSON.stringify(action_items)),
                                 "_blank", "location=yes,height=570,width=520,scrollbars=yes,status=yes");
    new_window.focus();
    """
    send_button_callback = CustomJS(args=dict(filtered_source=source), code=send_button_callback_code)
    send_button = Button(label="Send", type="success", callback=send_button_callback)

    input_widgets = HBox(children=[
        HBox(children=[element_type_select]),
        HBox(children=[min_mass_slider]),
        HBox(children=[send_button]),
    ])

    p = vplot(input_widgets, data_table)
    show(p)
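
# --- Editor's addition: a hedged usage sketch for plot_datatable. The column names above
# --- ('atomic number', 'symbol', 'metal', 'atomic mass' with values like '[98]') match Bokeh's
# --- periodic-table sample data, so a call along these lines is assumed to be the intent
# --- (requires the sample data to have been fetched once via bokeh.sampledata.download()).
from bokeh.sampledata.periodic_table import elements

plot_datatable(elements)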
                 end=len(ch2_data[0]), value=1, step=1, title="Peak Slider")

peak_start = Span(location=slider2.value, dimension='height', line_color='red',
                  line_dash='dashed', line_width=1)
p.add_layout(peak_start)

slider2.callback = CustomJS(args=dict(span=peak_start, slider=slider2), code="""
    span.location = slider.value;
""")

cb_up = CustomJS(args=dict(span=peak_start, slider=slider2), code="""
    span.location = span.location + 1;
    slider.value = span.location;
""")
cb_down = CustomJS(args=dict(span=peak_start, slider=slider2), code="""
    span.location = span.location - 1;
    slider.value = span.location;
""")

button_up = Button(label=">", callback=cb_up)
button_down = Button(label="<", callback=cb_down)

button_group = RadioButtonGroup(
                     d16, d17, d18, d19, d20])

source = ColumnDataSource(data=dict(value=[]))
source.on_change('data', update_data)

# Set up widgets
aperture = Slider(title="Aperture (meters)", value=12., start=4., end=20.0, step=4.0,
                  callback_policy='mouseup')
aperture.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

contrast = Slider(title="Log (Contrast)", value=-10, start=-11.0, end=-9, step=1.0,
                  callback_policy='mouseup')
contrast.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

iwa = Slider(title="Inner Working Angle (l/D)", value=1.5, start=1.5, end=4.0,
    def create_ui(self, data):
        self.logger.info("number of data items %d", len(data))

        # Create data source and data table
        # path, score, software_id, featcnt, featfreq, app name, app path, decision, status, comment,
        # active in play, still violating
        decision_editor = SelectEditor(options=["Unprocessed", "GPL Violation", "LGPL Violation",
                                                "Open Source App", "False Positive",
                                                "False Negative (LGPL)", "False Negative (GPL)"])
        status_editor = SelectEditor(options=["Unprocessed", "Emailed", "Confirmed", "Denied", "Authorized"])
        if self.app_info:
            columns = [
                TableColumn(field="myindex", title="Id"),
                TableColumn(field="path", title="File Path"),
                TableColumn(field="score", title="Score"),
                TableColumn(field="normscore", title="NormScore", formatter=NumberFormatter(format="0.00")),
                TableColumn(field="partial", title="PartialMatch"),
                TableColumn(field="repo_id", title="Repo ID"),
                TableColumn(field="software_name", title="OSS"),
                TableColumn(field="version", title="Version"),
                TableColumn(field="featcnt", title="FeatCount"),
                TableColumn(field="featfreq", title="FeatFreq"),
                TableColumn(field="package_name", title="Package"),
                TableColumn(field="app_path", title="App Path"),
                TableColumn(field="app_count", title="App Count"),
                TableColumn(field="decision", title="Decision", editor=decision_editor),
                TableColumn(field="status", title="Status", editor=status_editor),
                TableColumn(field="comment", title="Comment"),
                # Not sure whether we should add these two fields here.
                # TableColumn(field="active", title="Active in Play"),
                # TableColumn(field="still_violating", title="Still Violating"),
            ]
        else:
            template_str = '<a href="' + self.REPO_URL + '/<%= value %>"><%= value %></a>'
            columns = [
                TableColumn(field="myindex", title="Id"),
                TableColumn(field="name", title="Name"),
                TableColumn(field="score", title="Score", formatter=NumberFormatter(format="0.00")),
                TableColumn(field="normscore", title="NormScore", formatter=NumberFormatter(format="0.00")),
                TableColumn(field="partial", title="PartialMatch"),
                TableColumn(field="repo_id", title="RepoID"),
                TableColumn(field="software_name", title="OSS",
                            formatter=HTMLTemplateFormatter(template=template_str)),
                TableColumn(field="featcnt", title="FeatCount", formatter=NumberFormatter(format="0,000,000")),
                TableColumn(field="featfreq", title="FeatFreq", formatter=NumberFormatter(format="0,000,000")),
                TableColumn(field="version", title="Version"),
                TableColumn(field="decision", title="Decision", editor=decision_editor),
                TableColumn(field="status", title="Status", editor=status_editor),
                TableColumn(field="comment", title="Comment"),
                TableColumn(field="path", title="Path"),
            ]

        # source is the displayed table and can be modified by the user;
        # original_source holds the original data, is the base, and can only be modified by the program
        self.source = ColumnDataSource(self._data)
        self.original_source = ColumnDataSource(self._data)
        self.data_table = DataTable(source=self.source, columns=columns, width=2000, height=2000,
                                    editable=True, sortable=True)  # Disable sortable for now!

        # selectors / filters
        # reference for this callback pattern: https://gist.github.com/dennisobrien/450d7da20daaba6d39d0
        min_matching_score_slider = Slider(start=0, end=2, value=0.3, step=.01, title="Minimum Matching Score")
        max_matching_score_slider = Slider(start=0, end=2, value=0.7, step=.01, title="Maximum Matching Score")
        featfreq_slider = Slider(start=0, end=10000, value=0, step=1, title="Minimum Matching Num of Features")
        featcnt_slider = Slider(start=0, end=10000, value=50, step=1, title="Minimum Feature Count in OSS")
        kind_select = Select(value="All", options=["All", "Java", "Native"])
        file_select = Select(value="Name", options=["Name", "MD5", "Path"])
        search_input = TextInput(value=None, title="Enter library to search", callback=None)
        search_button = Button(label="Search", button_type="success")

        download_callback_code = """
        var data = source.get('data');
        var filetext = 'Id,File Name,Matching Score,Normalized Matching Score,Repo ID,Software Name,Feature Count,Feature Freq.,Version,Decision,Status,Comment,File Path\\n';
        var order = ['myindex', 'name', 'score', 'normscore', 'repo_id', 'software_name', 'featcnt',
                     'featfreq', 'version', 'decision', 'status', 'comment', 'path'];

        for (var i = 0; i < data['path'].length; ++i) {
            var currRow = [];
            for (var item in order) {
                key = order[item];
                currRow.push(data[key][i]);
            }
            var joined = currRow.join().concat('\\n');
            filetext = filetext.concat(joined);
        }

        var filename = 'violations.csv';
        var blob = new Blob([filetext], { type: 'text/csv;charset=utf-8;' });

        // addresses IE
        if (navigator.msSaveBlob) {
            // navigator.msSaveBlob(blob, filename);
        } else {
            var link = document.createElement('a');
            link.href = URL.createObjectURL(blob);
            link.download = filename;
            link.target = "_blank";
            link.style.visibility = 'hidden';
            link.dispatchEvent(new MouseEvent('click'));
        }
        """

        # enable downloading of results as a csv file
        download_button = Button(label="Download", button_type="success")
        download_button.callback = CustomJS(args=dict(source=self.source), code=download_callback_code)

        # enable comparison of selected rows
        compare_button = Button(label="Compare", button_type="success")
        compare_button.on_click(self.compare_callback)

        # update on change
        # controls = [min_matching_score_slider, max_matching_score_slider, featfreq_slider,
        #             featcnt_slider, kind_select, file_select, button]
        # for item in controls:
        #     item.on_change('value', lambda attr, old, new: self.update_source(item))

        combined_callback_code = """
        var data = source.get('data');
        var original_data = original_source.get('data');
        var min_score = min_matching_score_slider.get('value');
        var max_score = max_matching_score_slider.get('value');
        var search_input = search_input.get('value');
        var min_featfreq = featfreq_slider.get('value');
        var min_featcnt = featcnt_slider.get('value');
        var kind = kind_select.get('value');
        console.log("min score: " + min_score + ", max score: " + max_score +
                    ", min_featfreq: " + min_featfreq + ", min_featcnt: " + min_featcnt + ", kind: " + kind);
        var java_suffix = ".dex";
        var native_suffix = ".so";

        console.log("searchinput: " + search_input);
        var re;
        if (search_input) {
            re = new RegExp(search_input);
        } else {
            re = new RegExp(".*");
        }

        for (var key in original_data) {
            data[key] = [];
            for (var i = 0; i < original_data['path'].length; ++i) {
                if ((original_data['normscore'][i] >= min_score) &&
                    (original_data['normscore'][i] <= max_score) &&
                    (original_data['featfreq'][i] >= min_featfreq) &&
                    (original_data['featcnt'][i] >= min_featcnt)) {

                    // filter by java
                    if (kind == "Java" &&
                        original_data['path'][i].indexOf(java_suffix, original_data['path'][i].length - java_suffix.length) === -1)
                        continue;
                    // filter by native
                    if (kind == "Native" &&
                        original_data['path'][i].indexOf(native_suffix, original_data['path'][i].length - native_suffix.length) === -1)
                        continue;
                    // filter by search regex
                    if (!re.test(original_data['name'][i])) {
                        console.log("mismatch: " + original_data['name'][i]);
                        continue;
                    }
                    // this row is of the expected kind
                    data[key].push(original_data[key][i]);
                }
            }
        }
        source.trigger('change');
        target.trigger('change');
        """
        generic_callback = CustomJS(args=dict(source=self.source,
                                              original_source=self.original_source,
                                              search_input=search_input,
                                              max_matching_score_slider=max_matching_score_slider,
                                              min_matching_score_slider=min_matching_score_slider,
                                              featfreq_slider=featfreq_slider,
                                              featcnt_slider=featcnt_slider,
                                              kind_select=kind_select,
                                              target=self.data_table),
                                    code=combined_callback_code)
        min_matching_score_slider.callback = generic_callback
        max_matching_score_slider.callback = generic_callback
        featfreq_slider.callback = generic_callback
        featcnt_slider.callback = generic_callback
        search_button.callback = generic_callback
        kind_select.callback = generic_callback

        # install a callback for when a row gets selected
        self.source.on_change('selected', self.selected_callback)

        ###########################################################
        # Main
        ###########################################################
        controls = [min_matching_score_slider, max_matching_score_slider, featfreq_slider,
                    featcnt_slider, kind_select, file_select, search_input, search_button,
                    download_button, compare_button]
        plots_box = widgetbox(*controls, width=800, sizing_mode="fixed")
        layout = column(plots_box, self.data_table, sizing_mode="fixed")
        return layout
from bokeh.io import curdoc
from bokeh.layouts import column
from bokeh.plotting import figure
from bokeh.models.callbacks import CustomJS
from bokeh.models.sources import ColumnDataSource
from bokeh.models.widgets import Slider


# this is the real callback that we want to happen on slider mouseup
def cb(attr, old, new):
    print("UPDATE", source.data['value'])
    # p.x_range = range(0, int(source.data['value']))


# This data source is just used to communicate / trigger the real callback
source = ColumnDataSource(data=dict(value=[]))
source.on_change('data', cb)

# a figure, just for example
p = figure(x_range=(0, 1), y_range=(0, 1))

# add a slider with a CustomJS callback and a mouseup policy to update the source
slider = Slider(start=1, end=10, value=1, step=0.1, callback_policy='mouseup')
slider.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

curdoc().add_root(column(slider, p))

# make sure to add the source explicitly
curdoc().add_root(source)
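
# --- Editor's addition: a hedged sketch of the same mouseup behaviour on newer Bokeh releases
# --- (roughly 1.1 and later, and the only option from 2.0 on, where Slider.callback and
# --- callback_policy were removed). The 'value_throttled' property updates only on mouse
# --- release, so the dummy-source trick above is no longer needed.
from bokeh.io import curdoc
from bokeh.layouts import column
from bokeh.models import Slider
from bokeh.plotting import figure


def cb_throttled(attr, old, new):
    # fires once per mouse release instead of on every drag step
    print("UPDATE", new)


slider = Slider(start=1, end=10, value=1, step=0.1, title="throttled slider")
slider.on_change('value_throttled', cb_throttled)

p = figure(x_range=(0, 1), y_range=(0, 1))
curdoc().add_root(column(slider, p))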
# x axis selection box
x_axis_selector_title = Div(text="""X Axis:""", height=10)
x_axis_selector = RadioButtonGroup(labels=x_axis_options, active=0)
x_axis_selector.on_click(change_x_axis)

# toggle second axis button
toggle_second_axis_button = Button(label="Toggle Second Axis", button_type="success")
toggle_second_axis_button.on_click(toggle_second_axis)

# averaging slider
# This data source is just used to communicate / trigger the real callback
averaging_slider_dummy_source = ColumnDataSource(data=dict(value=[]))
averaging_slider_dummy_source.on_change('data', update_averaging)
averaging_slider = Slider(title="Averaging window", start=1, end=101, step=10, callback_policy='mouseup')
averaging_slider.callback = CustomJS(args=dict(source=averaging_slider_dummy_source), code="""
    source.data = { value: [cb_obj.value] }
""")

# group properties checkbox
group_cb = CheckboxGroup(labels=["Show statistics bands", "Ungroup signals"], active=[])
group_cb.on_click(toggle_group_property)

# color selector
color_selector_title = Div(text="""Select Color:""")
crsource = ColumnDataSource(data=dict(x=crx, y=cry, crcolor=crcolor, RGBs=crRGBs))
color_selector = figure(x_range=(0, color_resolution), y_range=(0, 10),
                        plot_width=300, plot_height=40, tools='tap')
color_selector.axis.visible = False
color_range = color_selector.rect(x='x', y='y', width=1, height=10, color='crcolor', source=crsource)
######################################
# SET UP ALL THE WIDGETS AND CALLBACKS
######################################
source = ColumnDataSource(data=dict(value=[]))
source.on_change('data', update_data)

exptime = Slider(title="Integration Time (hours)", value=24., start=1., end=1000.0, step=1.0,
                 callback_policy='mouseup')
exptime.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

distance = Slider(title="Log Distance (parsec)", value=1., start=-5.3, end=10.0, step=0.1,
                  callback_policy='mouseup')
distance.callback = CustomJS(args=dict(source=source), code="""
    source.data = { value: [cb_obj.value] }
""")

radius = Slider(title="Log Object Radius (R_Sun)", value=0, start=-10, end=20.,