def make_table(i, df):
    event_header = [
        col
        for col in df.columns
        if (col.startswith('__') or col == 'event')
    ]
    df = df[order_as(df.columns, event_header)]

    if df.index.name in df.columns:
        df.index = df.index.copy(deep=False)
        df.index.name = ''

    df_widget = pn.widgets.DataFrame(
        df,
        name=df.attrs.get('name', f'dataframe #{i}'),
        formatters={'bool': {'type': 'tickCross'}},
        # Disable editing of the dataframe
        disabled=True,
        sortable=False,

        # Ensure some columns are always displayed
        # Note: Tabulator requires a list of column names instead.
        frozen_columns=len(event_header) + 1,

        height=400,
        autosize_mode='fit_viewport',
        row_height=25,

        # Only relevant for pn.widgets.Tabulator
        # theme='simple',
        # selectable='checkbox',

        # Avoid transferring too much data at once to the browser
        # pagination='remote',
        # page_size=100,
    )
    return df_widget
def make_table(i, df):
    event_header = [
        col
        for col in df.columns
        if (col.startswith('__') or col == 'event')
    ]
    df = df[order_as(df.columns, event_header)]

    if df.index.name in df.columns:
        df.index = df.index.copy(deep=False)
        df.index.name = ''

    df_widget = pn.widgets.Tabulator(
        df,
        name=df.attrs.get('name', f'dataframe #{i}'),
        formatters={'bool': {'type': 'tickCross'}},
        # Disable editing of the dataframe
        disabled=True,
        # sortable=False,

        # Ensure some columns are always displayed
        frozen_columns=event_header,
        # For pn.widgets.DataFrame:
        # frozen_columns=len(event_header) + 1,

        height=400,
        # autosize_mode='fit_viewport',
        row_height=25,

        # Only relevant for pn.widgets.Tabulator
        theme='simple',
        selectable='toggle',
    )
    return df_widget
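# Usage sketch (not part of the original module): build a toy trace-like
# dataframe and render it with the Tabulator-based make_table() above. It
# assumes `panel` is installed and that `order_as` is the column-ordering
# helper provided by the surrounding project; the column names and values
# below are made up for illustration.
import pandas as pd
import panel as pn

pn.extension('tabulator')

demo_df = pd.DataFrame(
    {
        '__comm': ['task-0', 'task-1'],
        '__pid': [1234, 1235],
        'event': ['sched_wakeup', 'sched_switch'],
        'util': [42, 128],
    },
    index=pd.Index([0.001, 0.002], name='Time'),
)
demo_df.attrs['name'] = 'demo events'

# The returned widget can be displayed in a notebook or served with
# `panel serve`.
table = make_table(0, demo_df)
pn.Column(table).servable()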
def _get_rtapp_phases(self, event, task, wlgen_profile=None):
    df = self.df_rtapp_loop(task, wlgen_profile=wlgen_profile)
    df = df[df.event == event]

    # Order the START/END phase loop events so that the oldest START and
    # newest END come first, and ...
    if event == 'start':
        df = df[df.phase_loop == 0]
    elif event == 'end':
        df = df.sort_values(by=['phase_loop', 'thread_loop'], ascending=False)

    # ... keep only that event for each group
    grouped = df.groupby(
        ['__comm', '__pid', 'phase', 'event'],
        observed=True,
        sort=False,
    )
    df = grouped.head(1)

    # Reorder the index and keep only the required columns
    kept_cols = ['__comm', '__pid', 'phase', 'properties']
    kept_cols = order_as(
        set(kept_cols) & set(df.columns),
        order_as=kept_cols,
    )

    df = (
        df.sort_index()[kept_cols]
        .reset_index()
        .set_index([col for col in kept_cols if col != 'properties'])
    )
    return df
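# Illustrative sketch (separate from the class above): the sort + groupby().head(1)
# pattern keeps a single row per group. With a toy dataframe, sorting by
# phase_loop in descending order and taking head(1) selects the most recent
# entry of each phase, which is what the END branch above relies on.
import pandas as pd

toy = pd.DataFrame({
    'phase': ['p0', 'p0', 'p1', 'p1'],
    'phase_loop': [0, 1, 0, 2],
    'value': ['old', 'new', 'old', 'new'],
})

latest = (
    toy.sort_values(by='phase_loop', ascending=False)
    .groupby('phase', sort=False)
    .head(1)
)
# Each phase now appears exactly once, represented by its highest phase_loop row.
print(latest)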
def format_release(name, sections):
    title = f'{name}\n{len(name) * "="}\n'
    body = '\n\n'.join(
        format_section(marker, _msgs)
        for marker, _msgs in order_as(
            sections.items(),
            order_as=MARKERS,
            key=itemgetter(0),
        )
    )
    return f'{title}\n{body}'
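# Self-contained usage sketch: MARKERS, format_section() and order_as() live
# elsewhere in the real code base, so hypothetical stand-ins are defined here
# purely to show how format_release() assembles a release note. The marker
# names, messages and version string are made up.
from operator import itemgetter

MARKERS = ['FEATURE', 'FIX']  # hypothetical section ordering

def format_section(marker, msgs):
    # Hypothetical stand-in: one bullet per message under the marker heading.
    bullets = '\n'.join(f'* {msg}' for msg in msgs)
    return f'{marker}:\n{bullets}'

def order_as(items, order_as, key=None):
    # Minimal assumption of the project helper: yield items following the
    # positions given by the `order_as` reference list.
    key = key or (lambda item: item)
    return sorted(items, key=lambda item: order_as.index(key(item)))

sections = {
    'FIX': ['fix trace parsing crash'],
    'FEATURE': ['add Tabulator-based event table'],
}
print(format_release('v3.0', sections))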
def _df_all_events(self, events, field_sep=' ', fields_as_cols=None, event_as_col=True):
    """
    Split implementation to be able to use the cache
    """
    if fields_as_cols is None:
        fields_as_cols = ['__comm', '__pid', '__cpu']
    else:
        fields_as_cols = list(fields_as_cols)

    trace = self.trace

    if not events:
        df = pd.DataFrame(
            dict.fromkeys(
                ['info'] + fields_as_cols + (
                    ['event'] if event_as_col else []
                ),
                [],
            )
        )
    else:
        if event_as_col:
            fmt = '{fields}'
        else:
            fmt = '{{event:<{max_len}}}: {{fields}}'.format(
                max_len=max(len(event) for event in events),
            )

        fields_as_cols_set = set(fields_as_cols)

        def make_info_row(row, event):
            fields = field_sep.join(
                f'{key}={value}'
                for key, value in row.items()
                if key not in fields_as_cols_set
            )
            return fmt.format(
                event=event,
                fields=fields,
            )

        def make_info_df(event):
            df = trace.df_event(event)
            df = pd.DataFrame(
                {
                    'info': df.apply(make_info_row, axis=1, event=event),
                    **{
                        field: df[field]
                        for field in fields_as_cols
                    },
                },
                index=df.index,
            )

            if event_as_col:
                df['event'] = event
            return df

        df = pd.concat(map(make_info_df, events))
        df.sort_index(inplace=True)
        df_update_duplicates(df, inplace=True)

        # Reorder the columns to provide a better kernelshark-like display
        columns_order = (
            [col for col in df.columns if col.startswith('__')] +
            (['event'] if event_as_col else []) +
            ['info']
        )
        df = df[order_as(df.columns, columns_order)]

    df.attrs['name'] = 'events'
    return df
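# Toy sketch of the per-event merging done above (made-up event fields, not a
# real trace): each event dataframe is reduced to a kernelshark-like 'info'
# string plus a few kept columns, then everything is concatenated and sorted
# by timestamp. The project-specific duplicate handling and order_as() column
# reordering are left out to keep the example self-contained.
import pandas as pd

def to_info(df, event, keep=('__cpu',)):
    # Collapse every non-kept field into "key=value" pairs.
    info = df.apply(
        lambda row: ' '.join(
            f'{key}={value}'
            for key, value in row.items()
            if key not in keep
        ),
        axis=1,
    )
    out = pd.DataFrame(
        {'info': info, **{col: df[col] for col in keep}},
        index=df.index,
    )
    out['event'] = event
    return out

wakeup = pd.DataFrame(
    {'__cpu': [0, 1], 'pid': [10, 11]},
    index=pd.Index([0.2, 0.5], name='Time'),
)
switch = pd.DataFrame(
    {'__cpu': [1], 'next_pid': [11]},
    index=pd.Index([0.3], name='Time'),
)

merged = pd.concat([
    to_info(wakeup, 'sched_wakeup'),
    to_info(switch, 'sched_switch'),
]).sort_index()
print(merged[['__cpu', 'event', 'info']])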