def main():
    """Entry point: select the plain cell-pair experiments and hand their
    (experiment, series, group) tuples to ``compute_tuples``."""
    _filtered_experiments = filtering.by_categories(
        _experiments=all_experiments(),
        _is_single_cell=False,
        _is_high_temporal_resolution=False,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    _group_tuples = load.experiments_groups_as_tuples(_filtered_experiments)
    print('Total tuples:', len(_group_tuples))
    compute_tuples(_group_tuples)
def compute_fiber_densities(_offset_y):
    """Correlate fiber-density dynamics between paired cells and relate the
    correlation to window saturation.

    For each real, in-band cell pair (within PAIR_DISTANCE_RANGE) a quantification
    window is placed at the given y-offset for each cell. The pair's correlation of
    fiber-density derivatives is collected together with the mean saturation
    fraction of the two cells at the last shared time frame.

    :param _offset_y: quantification-window offset along y, in cell diameters.
    :return: tuple (_correlations_array, _saturation_array), index-aligned lists.
    """
    # select plain pair experiments (no bleb / dead / bead / metastasis variants)
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=False,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    print('Total tuples:', len(_tuples))

    # build one window-computation request per cell per pair, up to the last
    # time frame before the two windows would overlap
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': _offset_y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })
    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    # _saturation=True makes each computed record carry a saturation component
    # in addition to the (density, out-of-boundaries) pair — see the slicing below
    _fiber_densities = compute.fiber_densities(_windows_to_compute,
                                               _saturation=True,
                                               _subtract_border=True)
    # per (experiment, series, group, cell): first two elements of each record
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple][:2] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }
    # per (experiment, series, group, cell): (record[2], record[1]) pairs —
    # presumably (saturation fraction, out-of-boundaries flag); confirm against
    # compute.fiber_densities
    _experiments_saturation = {
        _key: [(_fiber_densities[_tuple][2], _fiber_densities[_tuple][1])
               for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _tuples_by_experiment = organize.by_experiment(_tuples)
    _correlations_array = []
    _saturation_array = []
    for _experiment in _tuples_by_experiment:
        print('Experiment:', _experiment)
        _experiment_tuples = _tuples_by_experiment[_experiment]
        for _tuple in tqdm(_experiment_tuples, desc='Main loop'):
            _, _series, _group = _tuple
            _left_cell_fiber_densities = _experiments_fiber_densities[(
                _experiment, _series, _group, 'left_cell')]
            _right_cell_fiber_densities = _experiments_fiber_densities[(
                _experiment, _series, _group, 'right_cell')]
            _properties = load.group_properties(_experiment, _series, _group)
            # drop blacklisted time frames for each cell
            _left_cell_fiber_densities = compute.remove_blacklist(
                _experiment, _series, _properties['cells_ids']['left_cell'],
                _left_cell_fiber_densities)
            _right_cell_fiber_densities = compute.remove_blacklist(
                _experiment, _series, _properties['cells_ids']['right_cell'],
                _right_cell_fiber_densities)
            # keep only the longest run of time frames valid for both cells
            _left_cell_fiber_densities_filtered, _right_cell_fiber_densities_filtered = \
                compute.longest_same_indices_shared_in_borders_sub_array(
                    _left_cell_fiber_densities, _right_cell_fiber_densities
                )
            # ignore small arrays
            if len(_left_cell_fiber_densities_filtered
                   ) < compute.minimum_time_frames_for_correlation(
                       _experiment):
                continue
            _correlation = compute_lib.correlation(
                compute_lib.derivative(_left_cell_fiber_densities_filtered,
                                       _n=DERIVATIVE),
                compute_lib.derivative(_right_cell_fiber_densities_filtered,
                                       _n=DERIVATIVE))
            # saturation — same blacklist/shared-indices treatment as the densities
            _left_cell_saturation = _experiments_saturation[(_experiment,
                                                             _series, _group,
                                                             'left_cell')]
            _right_cell_saturation = _experiments_saturation[(_experiment,
                                                              _series, _group,
                                                              'right_cell')]
            _left_cell_saturation = compute.remove_blacklist(
                _experiment, _series, _properties['cells_ids']['left_cell'],
                _left_cell_saturation)
            _right_cell_saturation = compute.remove_blacklist(
                _experiment, _series, _properties['cells_ids']['right_cell'],
                _right_cell_saturation)
            _left_cell_saturation_filtered, _right_cell_saturation_filtered = \
                compute.longest_same_indices_shared_in_borders_sub_array(
                    _left_cell_saturation, _right_cell_saturation
                )
            # last shared time frame's saturation per cell
            # NOTE(review): assumes the filtered entries are scalar saturation
            # fractions at this point — confirm against
            # longest_same_indices_shared_in_borders_sub_array
            _left_saturation_fraction_last_time_frame = _left_cell_saturation_filtered[-1]
            _right_saturation_fraction_last_time_frame = _right_cell_saturation_filtered[-1]
            _group_saturation_fraction_mean = \
                (_left_saturation_fraction_last_time_frame +
                 _right_saturation_fraction_last_time_frame) / 2
            _correlations_array.append(_correlation)
            _saturation_array.append(_group_saturation_fraction_mean)

    print('Total points:', len(_correlations_array))
    print('Wilcoxon of correlations around the zero:')
    print(wilcoxon(_correlations_array))
    print('Pearson correlation of correlations and saturation fraction mean:')
    print(
        compute_lib.correlation(_correlations_array,
                                _saturation_array,
                                _with_p_value=True))

    return _correlations_array, _saturation_array
def main(_high_temporal_resolution=True):
    """For each real, in-band cell pair, compute the pair's inner correlation of
    fiber-density derivatives over a sliding time window and save a per-pair
    line plot of correlation versus window start time.

    :param _high_temporal_resolution: selects the experiment category and the
        window length / step / end constants keyed by this flag.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    # pairs must have at least one full moving window of time frames
    _tuples = filtering.by_time_frames_amount(
        _tuples, _time_frames=MOVING_WINDOW_LENGTH[_high_temporal_resolution])
    _tuples = filtering.by_pair_distance_range(
        _tuples, _distance_range=PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    print('Total tuples:', len(_tuples))

    # one quantification-window request per cell per pair
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })
    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute,
                                               _subtract_border=True)
    _experiments_fiber_densities = {
        _key:
        [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    for _tuple in _tuples:
        # one correlations series (and one plot) per pair
        _correlations = []
        _experiment, _series_id, _group = _tuple
        _left_cell_fiber_densities = \
            _experiments_fiber_densities[(_experiment, _series_id, _group, 'left_cell')]
        _right_cell_fiber_densities = \
            _experiments_fiber_densities[(_experiment, _series_id, _group, 'right_cell')]
        _properties = load.group_properties(_experiment, _series_id, _group)
        _left_cell_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['left_cell'],
            _left_cell_fiber_densities)
        _right_cell_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['right_cell'],
            _right_cell_fiber_densities)
        # slide the moving window across time
        for _start_time_frame in \
                range(0, END_TIME_FRAME[_high_temporal_resolution],
                      TIME_FRAME_STEP[_high_temporal_resolution]):
            _left_cell_fiber_densities_window = _left_cell_fiber_densities[
                _start_time_frame:_start_time_frame +
                MOVING_WINDOW_LENGTH[_high_temporal_resolution]]
            _right_cell_fiber_densities_window = _right_cell_fiber_densities[
                _start_time_frame:_start_time_frame +
                MOVING_WINDOW_LENGTH[_high_temporal_resolution]]
            _left_cell_fiber_densities_filtered, _right_cell_fiber_densities_filtered = \
                compute.longest_same_indices_shared_in_borders_sub_array(
                    _left_cell_fiber_densities_window, _right_cell_fiber_densities_window)
            # ignore small arrays — keep a None placeholder so the x-axis
            # (window start time) stays aligned
            if len(_left_cell_fiber_densities_filtered
                   ) < MOVING_WINDOW_LENGTH[_high_temporal_resolution]:
                _correlations.append(None)
                continue
            _correlations.append(
                compute_lib.correlation(
                    compute_lib.derivative(_left_cell_fiber_densities_filtered,
                                           _n=DERIVATIVE),
                    compute_lib.derivative(
                        _right_cell_fiber_densities_filtered, _n=DERIVATIVE)))

        # plot
        _temporal_resolution = compute.temporal_resolution_in_minutes(
            _experiment)
        _fig = go.Figure(data=go.Scatter(
            x=np.arange(start=0, stop=len(_correlations), step=1) *
            _temporal_resolution * TIME_FRAME_STEP[_high_temporal_resolution],
            y=_correlations,
            mode='lines+markers',
            line={'dash': 'solid'}),
                         layout={
                             'xaxis': {
                                 'title': 'Window start time (minutes)',
                                 'zeroline': False
                             },
                             'yaxis': {
                                 'title': 'Inner correlation',
                                 'zeroline': False
                             }
                         })
        save.to_html(_fig=_fig,
                     _path=os.path.join(paths.PLOTS, save.get_module_name()),
                     _filename='plot_' + str(_experiment) + '_' +
                     str(_series_id) + '_' + str(_group))
def compute_fiber_densities(_band=True, _high_temporal_resolution=False):
    """Compare each pair's 'same' correlation (its two cells with each other)
    against 'different' correlations (one of its cells with a cell from another
    pair in the same experiment), expressed as signed distances from the y = x
    line, and relate those distances to the pair's mean z-position.

    :param _band: filter to pairs with a visible band (passed to filtering.by_band).
    :param _high_temporal_resolution: experiment-category selector.
    :return: tuple (_distances_from_y_equal_x, _z_positions_array), index-aligned.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples, _band=_band)
    print('Total tuples:', len(_tuples))

    # one quantification-window request per cell per pair
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })
    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute,
                                               _subtract_border=True)
    _experiments_fiber_densities = {
        _key:
        [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    # 'different' pairings only make sense within the same experiment
    _tuples_by_experiment = organize.by_experiment(_tuples)
    _distances_from_y_equal_x = []
    _z_positions_array = []
    for _experiment in _tuples_by_experiment:
        print('Experiment:', _experiment)
        _experiment_tuples = _tuples_by_experiment[_experiment]

        for _same_index in tqdm(range(len(_experiment_tuples)),
                                desc='Main loop'):
            _same_tuple = _experiment_tuples[_same_index]
            _same_experiment, _same_series, _same_group = _same_tuple

            _same_left_cell_fiber_densities = \
                _experiments_fiber_densities[
                    (_same_experiment, _same_series, _same_group, 'left_cell')
                ]
            _same_right_cell_fiber_densities = \
                _experiments_fiber_densities[
                    (_same_experiment, _same_series, _same_group, 'right_cell')
                ]

            _same_properties = \
                load.group_properties(_same_experiment, _same_series, _same_group)
            _same_left_cell_fiber_densities = compute.remove_blacklist(
                _same_experiment, _same_series,
                _same_properties['cells_ids']['left_cell'],
                _same_left_cell_fiber_densities)
            _same_right_cell_fiber_densities = compute.remove_blacklist(
                _same_experiment, _same_series,
                _same_properties['cells_ids']['right_cell'],
                _same_right_cell_fiber_densities)

            _same_left_cell_fiber_densities_filtered, _same_right_cell_fiber_densities_filtered = \
                compute.longest_same_indices_shared_in_borders_sub_array(
                    _same_left_cell_fiber_densities, _same_right_cell_fiber_densities
                )

            # ignore small arrays
            if len(_same_left_cell_fiber_densities_filtered
                   ) < compute.minimum_time_frames_for_correlation(
                       _same_experiment):
                continue

            # 'same' correlation: the pair's two cells with each other
            _same_correlation = compute_lib.correlation(
                compute_lib.derivative(
                    _same_left_cell_fiber_densities_filtered, _n=DERIVATIVE),
                compute_lib.derivative(
                    _same_right_cell_fiber_densities_filtered, _n=DERIVATIVE))
            _same_group_mean_z_position = \
                compute.group_mean_z_position_from_substrate(_same_experiment,
                                                             _same_series,
                                                             _same_group)

            # 'different' correlations: every cell of this pair against every
            # cell of every other pair in the experiment
            for _different_index in range(len(_experiment_tuples)):
                if _same_index != _different_index:
                    _different_tuple = _experiment_tuples[_different_index]
                    _different_experiment, _different_series, _different_group = \
                        _different_tuple
                    for _same_cell_id, _different_cell_id in product(
                        ['left_cell', 'right_cell'],
                        ['left_cell', 'right_cell']):
                        _same_fiber_densities = _experiments_fiber_densities[(
                            _same_experiment, _same_series, _same_group,
                            _same_cell_id)]
                        _different_fiber_densities = _experiments_fiber_densities[
                            (_different_experiment, _different_series,
                             _different_group, _different_cell_id)]
                        _different_properties = load.group_properties(
                            _different_experiment, _different_series,
                            _different_group)
                        _same_fiber_densities = compute.remove_blacklist(
                            _same_experiment, _same_series,
                            _same_properties['cells_ids'][_same_cell_id],
                            _same_fiber_densities)
                        _different_fiber_densities = compute.remove_blacklist(
                            _different_experiment, _different_series,
                            _different_properties['cells_ids']
                            [_different_cell_id], _different_fiber_densities)
                        _same_fiber_densities_filtered, _different_fiber_densities_filtered = \
                            compute.longest_same_indices_shared_in_borders_sub_array(
                                _same_fiber_densities, _different_fiber_densities
                            )
                        # ignore small arrays
                        if len(_same_fiber_densities_filtered
                               ) < compute.minimum_time_frames_for_correlation(
                                   _different_experiment):
                            continue
                        _different_correlation = compute_lib.correlation(
                            compute_lib.derivative(
                                _same_fiber_densities_filtered, _n=DERIVATIVE),
                            compute_lib.derivative(
                                _different_fiber_densities_filtered,
                                _n=DERIVATIVE))
                        # signed distance from y = x: positive when the pair's
                        # own correlation exceeds the cross-pair correlation
                        _point_distance = compute_lib.distance_from_a_point_to_a_line(
                            _line=[-1, -1, 1, 1],
                            _point=[_same_correlation, _different_correlation])
                        if _same_correlation > _different_correlation:
                            _distances_from_y_equal_x.append(_point_distance)
                        else:
                            _distances_from_y_equal_x.append(-_point_distance)
                        _z_positions_array.append(_same_group_mean_z_position)

    print('Total points:', len(_distances_from_y_equal_x))
    print('Wilcoxon of distances from y = x around the zero:')
    print(wilcoxon(_distances_from_y_equal_x))
    print(
        'Pearson correlation of distances from y = x and z position distances:'
    )
    print(
        compute_lib.correlation(_distances_from_y_equal_x, _z_positions_array,
                                _with_p_value=True))

    return _distances_from_y_equal_x, _z_positions_array
def main(_real_cells=True, _static=False, _band=True, _high_temporal_resolution=False):
    """Scatter-plot per-pair fiber density (z-score, early time frame) against
    the pair's correlation of fiber-density derivatives, and print their
    Pearson correlation.

    :param _real_cells: keep only real (vs. fake) pairs.
    :param _static: keep only fake-static pairs when filtering fake pairs.
    :param _band: keep only pairs with a visible band.
    :param _high_temporal_resolution: experiment-category selector.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False
    )
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples, _real_pairs=_real_cells)
    _tuples = filtering.by_fake_static_pairs(_tuples, _fake_static_pairs=_static)
    _tuples = filtering.by_band(_tuples, _band=_band)
    _tuples = filtering.by_time_frames_amount(_tuples, compute.minimum_time_frames_for_correlation(_experiments[0]))
    print('Total tuples:', len(_tuples))

    # pass 1: single-time-point windows at OFFSET_Y_DENSITY for the density value
    print('Density')
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _time_frame = compute.minimum_time_frames_for_correlation(_experiment)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y_DENSITY,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_point': _time_frame - 1
            })
    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _densities_fiber_densities = compute.fiber_densities(_windows_to_compute, _subtract_border=False)
    _densities_experiments_fiber_densities = {
        _key: [_densities_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    # pass 2: full time series at OFFSET_Y_CORRELATION for the correlation value
    print('Correlations')
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(_experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y_CORRELATION,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })
    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _correlations_fiber_densities = compute.fiber_densities(_windows_to_compute, _subtract_border=True)
    _correlations_experiments_fiber_densities = {
        _key: [_correlations_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _densities = []
    _correlations = []
    for _tuple in tqdm(_tuples, desc='Main loop'):
        _experiment, _series_id, _group = _tuple

        # density: z-scored single-window value, averaged over the two cells
        _left_cell_fiber_density = \
            _densities_experiments_fiber_densities[(_experiment, _series_id, _group, 'left_cell')][0]
        _right_cell_fiber_density = \
            _densities_experiments_fiber_densities[(_experiment, _series_id, _group, 'right_cell')][0]

        # not relevant — window out of boundaries for either cell
        if _left_cell_fiber_density[1] or _right_cell_fiber_density[1]:
            continue

        _normalization = load.normalization_series_file_data(_experiment, _series_id)
        _left_cell_fiber_density_normalized = compute_lib.z_score(
            _x=_left_cell_fiber_density[0],
            _average=_normalization['average'],
            _std=_normalization['std']
        )
        _right_cell_fiber_density_normalized = compute_lib.z_score(
            _x=_right_cell_fiber_density[0],
            _average=_normalization['average'],
            _std=_normalization['std']
        )
        _density = (_left_cell_fiber_density_normalized + _right_cell_fiber_density_normalized) / 2

        # correlation of the two cells' fiber-density derivatives over time
        _left_cell_fiber_densities = \
            _correlations_experiments_fiber_densities[(_experiment, _series_id, _group, 'left_cell')]
        _right_cell_fiber_densities = \
            _correlations_experiments_fiber_densities[(_experiment, _series_id, _group, 'right_cell')]
        _properties = load.group_properties(_experiment, _series_id, _group)
        _left_cell_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['left_cell'], _left_cell_fiber_densities)
        _right_cell_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['right_cell'], _right_cell_fiber_densities)
        _left_cell_fiber_densities_filtered, _right_cell_fiber_densities_filtered = \
            compute.longest_same_indices_shared_in_borders_sub_array(
                _left_cell_fiber_densities, _right_cell_fiber_densities
            )

        # ignore small arrays
        if len(_left_cell_fiber_densities_filtered) < compute.minimum_time_frames_for_correlation(_experiment):
            continue

        _correlation = compute_lib.correlation(
            compute_lib.derivative(_left_cell_fiber_densities_filtered, _n=DERIVATIVE),
            compute_lib.derivative(_right_cell_fiber_densities_filtered, _n=DERIVATIVE)
        )

        _densities.append(_density)
        _correlations.append(_correlation)

    print('Total tuples:', len(_densities))
    print('Pearson correlation of densities and correlations:')
    print(compute_lib.correlation(_densities, _correlations, _with_p_value=True))

    # plot
    _fig = go.Figure(
        data=go.Scatter(
            x=_densities,
            y=_correlations,
            mode='markers',
            marker={
                'size': 5,
                'color': '#ea8500'
            },
            showlegend=False
        ),
        layout={
            'xaxis': {
                'title': 'Fiber density (z-score)',
                'zeroline': False,
                'range': [-1.1, 15.2],
                # 'tickmode': 'array',
                # 'tickvals': [-1, -0.5, 0, 0.5, 1]
            },
            'yaxis': {
                'title': 'Correlation',
                'zeroline': False,
                'range': [-1.1, 1.2],
                'tickmode': 'array',
                'tickvals': [-1, -0.5, 0, 0.5, 1]
            },
            # hand-drawn axis lines
            'shapes': [
                {
                    'type': 'line',
                    'x0': 0,
                    'y0': -1,
                    'x1': 0,
                    'y1': 1,
                    'line': {
                        'color': 'black',
                        'width': 2
                    }
                },
                {
                    'type': 'line',
                    'x0': 0,
                    'y0': -1,
                    'x1': 15,
                    'y1': -1,
                    'line': {
                        'color': 'black',
                        'width': 2
                    }
                }
            ]
        }
    )
    save.to_html(
        _fig=_fig,
        _path=os.path.join(paths.PLOTS, save.get_module_name()),
        _filename='plot_real_' + str(_real_cells) + '_static_' + str(_static) + '_band_' +
                  str(_band) + '_high_time_' + str(_high_temporal_resolution) + '_y_density_' +
                  str(OFFSET_Y_DENSITY) + '_y_correlation_' + str(OFFSET_Y_CORRELATION)
    )
def main():
    """Compare fiber-density z-scores of regular (no-bleb) pairs against bleb
    pairs across window distances: print Wilcoxon statistics per offset and
    save two plots (regular vs. bleb, and bleb real vs. fake).
    """
    print('Regular experiments')
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=False,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=False,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    # NOTE(review): this call passes _experiments, discarding the _tuples
    # computed on the previous line — verify by_bleb_from_start's expected
    # input; it may have been meant to receive _tuples.
    _tuples = filtering.by_bleb_from_start(_experiments, _from_start=False)
    _regular_experiments, _regular_offsets_x = compute_fiber(_tuples)

    print('Bleb experiments')
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=False,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=True,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    # NOTE(review): same _experiments-vs-_tuples concern as above.
    _tuples = filtering.by_bleb_from_start(_experiments, _from_start=True)
    _bleb_experiments_real, _bleb_experiments_fake, _bleb_offsets_x = compute_matched_fiber(
        _tuples)

    # per-offset summary table
    print('\nWindow distance (cell diameter)',
          'Regular # of cells',
          'Regular Wilcoxon p-value',
          'Bleb # of cells',
          'Bleb "real" Wilcoxon p-value',
          'Bleb "fake" Wilcoxon p-value',
          sep='\t')
    for _offset_x, _regular_experiment, _bleb_experiment_real, _bleb_experiment_fake in \
            zip(_regular_offsets_x, _regular_experiments, _bleb_experiments_real,
                _bleb_experiments_fake):
        print(round(_offset_x, 2),
              len(_regular_experiment),
              wilcoxon(_regular_experiment)[1],
              len(_bleb_experiment_real),
              wilcoxon(_bleb_experiment_real)[1],
              wilcoxon(_bleb_experiment_fake)[1],
              sep='\t')

    # bleb real vs. fake (paired Wilcoxon per offset)
    print('\nBleb real vs. fake wilcoxon')
    print('Window distance (cell diameter)', 'Wilcoxon p-value', sep='\t')
    for _offset_x, _bleb_experiment_real, _bleb_experiment_fake in \
            zip(_bleb_offsets_x, _bleb_experiments_real, _bleb_experiments_fake):
        print(round(_offset_x, 2),
              wilcoxon(_bleb_experiment_real, _bleb_experiment_fake)[1],
              sep='\t')

    # plot regular vs. bleb
    _fig = go.Figure(data=[
        go.Scatter(x=_regular_offsets_x,
                   y=[np.mean(_array) for _array in _regular_experiments],
                   name='No bleb',
                   error_y={
                       'type': 'data',
                       'array':
                       [np.std(_array) for _array in _regular_experiments],
                       'thickness': 1,
                       'color': '#005b96'
                   },
                   mode='markers',
                   marker={
                       'size': 15,
                       'color': '#005b96'
                   },
                   opacity=0.7),
        go.Scatter(x=_bleb_offsets_x,
                   y=[np.mean(_array) for _array in _bleb_experiments_real],
                   name='Bleb',
                   error_y={
                       'type': 'data',
                       'array':
                       [np.std(_array) for _array in _bleb_experiments_real],
                       'thickness': 1,
                       'color': '#ea8500'
                   },
                   mode='markers',
                   marker={
                       'size': 15,
                       'color': '#ea8500'
                   },
                   opacity=0.7)
    ],
                     layout={
                         'xaxis': {
                             'title': 'Window distance (cell diameter)',
                             'zeroline': False
                         },
                         'yaxis': {
                             'title': 'Fiber density (z-score)',
                             'range': [-1.7, 13],
                             'zeroline': False,
                             'tickmode': 'array',
                             'tickvals': [0, 4, 8, 12]
                         },
                         'legend': {
                             'xanchor': 'right',
                             'yanchor': 'top',
                             'bordercolor': 'black',
                             'borderwidth': 2
                         },
                         # hand-drawn axis lines
                         'shapes': [{
                             'type': 'line',
                             'x0': -0.2,
                             'y0': -1.5,
                             'x1': 3.4,
                             'y1': -1.5,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }, {
                             'type': 'line',
                             'x0': -0.2,
                             'y0': -1.5,
                             'x1': -0.2,
                             'y1': 13,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }]
                     })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot_regular_vs_bleb')

    # plot bleb real vs. bleb fake
    _fig = go.Figure(data=[
        # FIX: the 'Bleb real' trace previously used _regular_offsets_x as its
        # x-values (copy-paste from the previous figure); both traces in this
        # figure come from compute_matched_fiber, so both use _bleb_offsets_x.
        go.Scatter(x=_bleb_offsets_x,
                   y=[np.mean(_array) for _array in _bleb_experiments_real],
                   name='Bleb real',
                   error_y={
                       'type': 'data',
                       'array':
                       [np.std(_array) for _array in _bleb_experiments_real],
                       'thickness': 1,
                       'color': '#005b96'
                   },
                   mode='markers',
                   marker={
                       'size': 15,
                       'color': '#005b96'
                   },
                   opacity=0.7),
        go.Scatter(x=_bleb_offsets_x,
                   y=[np.mean(_array) for _array in _bleb_experiments_fake],
                   name='Bleb fake',
                   error_y={
                       'type': 'data',
                       'array':
                       [np.std(_array) for _array in _bleb_experiments_fake],
                       'thickness': 1,
                       'color': '#ea8500'
                   },
                   mode='markers',
                   marker={
                       'size': 15,
                       'color': '#ea8500'
                   },
                   opacity=0.7)
    ],
                     layout={
                         'xaxis': {
                             'title': 'Window distance (cell diameter)',
                             'zeroline': False
                         },
                         'yaxis': {
                             'title': 'Fiber density (z-score)',
                             'range': [-0.5, 1.5],
                             'zeroline': False,
                             'tickmode': 'array',
                             'tickvals': [-0.5, 0, 0.5, 1]
                         },
                         'legend': {
                             'xanchor': 'right',
                             'yanchor': 'top',
                             'bordercolor': 'black',
                             'borderwidth': 2
                         },
                         # hand-drawn axis lines
                         'shapes': [{
                             'type': 'line',
                             'x0': -0.2,
                             'y0': -0.45,
                             'x1': 3.4,
                             'y1': -0.45,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }, {
                             'type': 'line',
                             'x0': -0.2,
                             'y0': -0.45,
                             'x1': -0.2,
                             'y1': 1.5,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }]
                     })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot_bleb_real_vs_fake')
def compute_experiments_data():
    """Collect normalized (z-scored) fiber-density values per time frame across
    all real, in-band cell pairs, pooling both cells of every pair.

    :return: list of EXPERIMENTS_TIME_FRAMES lists; element t holds the
        normalized fiber densities observed at time frame t.
    """
    _experiments = all_experiments()
    _experiments = experiments_filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=False,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False
    )
    _tuples = experiments_load.experiments_groups_as_tuples(_experiments)
    _tuples = experiments_filtering.by_time_frames_amount(_tuples, EXPERIMENTS_TIME_FRAMES)
    _tuples = experiments_filtering.by_real_pairs(_tuples)
    _tuples = experiments_filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = experiments_filtering.by_band(_tuples)
    print('Total tuples:', len(_tuples))

    # one quantification-window request per cell per pair
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _time_frame = experiments_compute.density_time_frame(_experiment)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': experiments_config.QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': experiments_config.QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': experiments_config.QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _time_frame
            })
    _windows_dictionary, _windows_to_compute = \
        experiments_compute.windows(_arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = experiments_compute.fiber_densities(_windows_to_compute)

    # bucket normalized densities by time frame across all pairs
    _experiments_fiber_densities = [[] for _i in range(EXPERIMENTS_TIME_FRAMES)]
    for _tuple in tqdm(_tuples, desc='Experiments loop'):
        _experiment, _series_id, _group = _tuple
        _normalization = experiments_load.normalization_series_file_data(_experiment, _series_id)
        for _time_frame in range(EXPERIMENTS_TIME_FRAMES):
            for _cell_id in ['left_cell', 'right_cell']:
                _window_tuple = _windows_dictionary[(_experiment, _series_id, _group,
                                                     _cell_id)][_time_frame]
                _fiber_density = _fiber_densities[_window_tuple]

                # optionally skip windows that fell out of the image boundaries
                # (_fiber_density[1] is the out-of-boundaries flag)
                if not OUT_OF_BOUNDARIES and _fiber_density[1]:
                    continue

                _normalized_fiber_density = compute_lib.z_score(
                    _x=_fiber_density[0],
                    _average=_normalization['average'],
                    _std=_normalization['std']
                )

                if not np.isnan(_normalized_fiber_density):
                    _experiments_fiber_densities[_time_frame].append(_normalized_fiber_density)

    print('Total experiments pairs:', len(_experiments_fiber_densities[0]))

    return _experiments_fiber_densities
def main(_real_cells=True, _static=False, _band=True, _high_temporal_resolution=False,
         _pair_distance_range=None, _offset_y=0.5):
    """Plot the mean per-time-frame change in normalized fiber density across
    all valid cells of the selected pairs.

    :param _real_cells: keep only real (vs. fake) pairs.
    :param _static: keep only fake-static pairs when filtering fake pairs.
    :param _band: keep only pairs with a visible band.
    :param _high_temporal_resolution: experiment-category selector.
    :param _pair_distance_range: pair distance filter; defaults to PAIR_DISTANCE_RANGE.
    :param _offset_y: quantification-window offset along y, in cell diameters.
    """
    if _pair_distance_range is None:
        _pair_distance_range = PAIR_DISTANCE_RANGE

    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, _pair_distance_range)
    _tuples = filtering.by_real_pairs(_tuples, _real_pairs=_real_cells)
    _tuples = filtering.by_fake_static_pairs(_tuples,
                                             _fake_static_pairs=_static)
    _tuples = filtering.by_band(_tuples, _band=_band)
    print('Total tuples:', len(_tuples))

    _arguments = []
    _longest_time_frame = 0
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)

        # save for later — sizes the per-time-frame buckets below
        if _latest_time_frame > _longest_time_frame:
            _longest_time_frame = _latest_time_frame

        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': _offset_y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })
    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)
    _experiments_fiber_densities = {
        _key:
        [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _valid_tuples = []
    _valid_cells = []
    # _densities[t] accumulates frame-to-frame z-score changes at time frame t
    _densities = [[] for _ in range(_longest_time_frame)]
    for _tuple in tqdm(_tuples, desc='Experiments loop'):
        _experiment, _series_id, _group = _tuple
        _normalization = load.normalization_series_file_data(
            _experiment, _series_id)
        _properties = load.group_properties(_experiment, _series_id, _group)
        for _cell_id in ['left_cell', 'right_cell']:
            _cell_fiber_densities = \
                _experiments_fiber_densities[(_experiment, _series_id, _group, _cell_id)]
            _cell_fiber_densities = compute.remove_blacklist(
                _experiment, _series_id, _properties['cells_ids'][_cell_id],
                _cell_fiber_densities)
            _previous_cell_fiber_density_normalized = None
            for _time_frame, _cell_fiber_density in enumerate(
                    _cell_fiber_densities):

                # not out of border — an out-of-boundaries frame also breaks
                # the chain so no change is computed across the gap
                if _cell_fiber_density[1]:
                    _previous_cell_fiber_density_normalized = None
                    continue

                # normalize
                _cell_fiber_density_normalized = compute_lib.z_score(
                    _x=_cell_fiber_density[0],
                    _average=_normalization['average'],
                    _std=_normalization['std'])

                # no previous — first valid frame of a run only seeds the chain
                if _previous_cell_fiber_density_normalized is None:
                    _previous_cell_fiber_density_normalized = _cell_fiber_density_normalized
                    continue

                # change
                _cell_fiber_density_normalized_change = _cell_fiber_density_normalized - _previous_cell_fiber_density_normalized
                _previous_cell_fiber_density_normalized = _cell_fiber_density_normalized

                # save
                _densities[_time_frame].append(
                    _cell_fiber_density_normalized_change)

                if _tuple not in _valid_tuples:
                    _valid_tuples.append(_tuple)

                _cell_tuple = (_experiment, _series_id, _group, _cell_id)
                if _cell_tuple not in _valid_cells:
                    _valid_cells.append(_cell_tuple)

    print('Total pairs:', len(_valid_tuples))
    print('Total cells:', len(_valid_cells))

    # plot
    _temporal_resolution = compute.temporal_resolution_in_minutes(
        _experiments[0])
    _fig = go.Figure(
        data=go.Scatter(x=np.array(range(_longest_time_frame)) *
                        _temporal_resolution,
                        y=[np.mean(_array) for _array in _densities],
                        name='Fiber density change (z-score)',
                        error_y={
                            'type': 'data',
                            'array':
                            [np.std(_array) for _array in _densities],
                            'thickness': 1
                        },
                        mode='lines+markers',
                        marker={
                            'size': 5,
                            'color': '#ea8500'
                        },
                        line={'dash': 'solid'},
                        showlegend=False),
        layout={
            'xaxis': {
                'title': 'Time (minutes)',
                # 'zeroline': False
            },
            'yaxis': {
                'title': 'Fiber density change (z-score)',
                # 'zeroline': False
            },
            # 'shapes': [
            #     {
            #         'type': 'line',
            #         'x0': -_temporal_resolution,
            #         'y0': -0.5,
            #         'x1': -_temporal_resolution,
            #         'y1': 2,
            #         'line': {
            #             'color': 'black',
            #             'width': 2
            #         }
            #     },
            #     {
            #         'type': 'line',
            #         'x0': -_temporal_resolution,
            #         'y0': -0.5,
            #         'x1': 350,
            #         'y1': -0.5,
            #         'line': {
            #             'color': 'black',
            #             'width': 2
            #         }
            #     }
            # ]
        })
    save.to_html(
        _fig=_fig,
        _path=os.path.join(paths.PLOTS, save.get_module_name()),
        _filename='plot_real_' + str(_real_cells) + '_static_' + str(_static) +
        '_band_' + str(_band) + '_high_time_' +
        str(_high_temporal_resolution) + '_range_' +
        '_'.join([str(_distance) for _distance in _pair_distance_range]) +
        '_y_' + str(_offset_y))
def main():
    """Correlate fiber-density dynamics between every pair of distinct single cells.

    Pipeline:
      1. Select single-cell experiments (standard temporal resolution, no
         perturbations) and keep only main-cell tuples with enough time frames.
      2. Build quantification-window arguments to the 'left' and 'right' of
         each cell and compute fiber densities for those windows.
      3. For every unordered pair of different cells, correlate the
         derivative time series (one correlation per entry in DERIVATIVES);
         per-cell series come from compute_single_cell_mean (presumably a
         mean over the cell's windows — see that helper).
      4. Print a Wilcoxon test around zero per derivative and save a box plot.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=True,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=False,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    # density_time_frame of the first experiment is used as the minimum
    # time-frames threshold for all tuples
    _tuples = filtering.by_time_frames_amount(
        _tuples, compute.density_time_frame(_experiments[0]))
    _tuples = filtering.by_main_cell(_tuples)
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _time_frame = compute.density_time_frame(_experiment)
        # one quantification window on each side of the cell
        for _direction in ['left', 'right']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': 'cell',
                'direction': _direction,
                'time_points': _time_frame
            })
    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments,
                        _keys=['experiment', 'series_id', 'group', 'direction'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute, _subtract_border=True)
    # regroup the tuples per single cell id
    _tuples = organize.by_single_cell_id(_tuples)
    print('Total tuples:', len(_tuples))
    _experiments_ids = list(_tuples.keys())
    # one correlations array per derivative order
    _y_arrays = [[] for _i in DERIVATIVES]
    for _index_1 in tqdm(range(len(_experiments_ids)), desc='Main loop'):
        _tuple_1 = _experiments_ids[_index_1]
        _experiment_1, _series_id_1, _cell_id_1 = _tuple_1
        _fiber_densities_1 = compute_single_cell_mean(
            _experiment=_experiment_1,
            _series_id=_series_id_1,
            _cell_tuples=_tuples[_tuple_1],
            _windows_dictionary=_windows_dictionary,
            _fiber_densities=_fiber_densities)
        # only pairs of different cells, each pair considered once
        for _index_2 in range(_index_1 + 1, len(_experiments_ids)):
            _tuple_2 = _experiments_ids[_index_2]
            _experiment_2, _series_id_2, _cell_id_2 = _tuple_2
            _fiber_densities_2 = compute_single_cell_mean(
                _experiment=_experiment_2,
                _series_id=_series_id_2,
                _cell_tuples=_tuples[_tuple_2],
                _windows_dictionary=_windows_dictionary,
                _fiber_densities=_fiber_densities)
            for _derivative_index, _derivative in enumerate(DERIVATIVES):
                _y_arrays[_derivative_index].append(
                    compute_lib.correlation(
                        compute_lib.derivative(_fiber_densities_1, _n=_derivative),
                        compute_lib.derivative(_fiber_densities_2, _n=_derivative)))
    print('Total points:', len(_y_arrays[0]))
    print('Wilcoxon around the zero')
    for _y_array, _derivative in zip(_y_arrays, DERIVATIVES):
        print('Derivative:', _derivative, wilcoxon(_y_array))

    # plot
    _colors_array = config.colors(3)
    _fig = go.Figure(data=[
        go.Box(y=_y,
               name=_derivative,
               boxpoints='all',
               jitter=1,
               pointpos=0,
               line={'width': 1},
               fillcolor='white',
               marker={
                   'size': 10,
                   'color': _color
               },
               opacity=0.7,
               showlegend=False)
        for _y, _derivative, _color in zip(_y_arrays, DERIVATIVES_TEXT, _colors_array)
    ],
                     layout={
                         'xaxis': {
                             'title': 'Fiber density derivative',
                             'zeroline': False
                         },
                         'yaxis': {
                             'title': 'Correlation',
                             'range': [-1, 1],
                             'zeroline': False,
                             'tickmode': 'array',
                             'tickvals': [-1, -0.5, 0, 0.5, 1]
                         }
                     })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot')
def main():
    """Plot normalized fiber density against pair distance, per x-offset.

    For each cell pair (real pairs with a band), quantifies fiber density in
    windows at several x-offsets between the cells, z-scores the values
    against the series normalization, and — for each offset — prints the
    pair-distance/fiber-density correlation and saves a scatter plot.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=False,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=False,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_time_frames_amount(
        _tuples, compute.density_time_frame(_experiments[0]))
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    print('Total tuples:', len(_tuples))
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _time_frame = compute.density_time_frame(_experiment)
        _pair_distance = \
            compute.pair_distance_in_cell_size_time_frame(_experiment, _series_id, _group,
                                                          _time_frame=_time_frame - 1)
        for _offset_x in OFFSETS_X:
            # keep only offsets whose window fits between the two cells
            if _pair_distance / 2 - 0.5 - QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER >= _offset_x:
                for _cell_id in ['left_cell', 'right_cell']:
                    _arguments.append({
                        'experiment': _experiment,
                        'series_id': _series_id,
                        'group': _group,
                        'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                        'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                        'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                        'offset_x': _offset_x,
                        'offset_y': OFFSET_Y,
                        'offset_z': OFFSET_Z,
                        'cell_id': _cell_id,
                        'direction': 'inside',
                        # NOTE(review): other blocks in this file pass a
                        # 'time_points' key; here a singular 'time_point' is
                        # requested — presumably compute.windows accepts both;
                        # verify against compute.windows
                        'time_point': _time_frame - 1
                    })
    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments,
                        _keys=['experiment', 'series_id', 'group', 'cell_id', 'offset_x'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)
    for _offset_x in OFFSETS_X:
        _x_array = []
        _y_array = []
        for _tuple in _tuples:
            _experiment, _series_id, _group = _tuple
            for _cell_id in ['left_cell', 'right_cell']:
                if (_experiment, _series_id, _group, _cell_id, _offset_x) in _windows_dictionary:
                    # NOTE(review): the x-axis distance is recomputed at
                    # _time_frame=0 while windows were quantified at
                    # _time_frame - 1 — confirm this asymmetry is intended
                    _pair_distance = \
                        compute.pair_distance_in_cell_size_time_frame(_experiment, _series_id, _group,
                                                                      _time_frame=0)
                    _normalization = load.normalization_series_file_data(
                        _experiment, _series_id)
                    _window_tuple = _windows_dictionary[(_experiment, _series_id, _group,
                                                         _cell_id, _offset_x)][0]
                    _fiber_density = _fiber_densities[_window_tuple]
                    # skip out-of-boundaries windows unless explicitly allowed
                    if not OUT_OF_BOUNDARIES and _fiber_density[1]:
                        continue
                    _normalized_fiber_density = compute_lib.z_score(
                        _x=_fiber_density[0],
                        _average=_normalization['average'],
                        _std=_normalization['std'])
                    if not np.isnan(_normalized_fiber_density):
                        _x_array.append(_pair_distance)
                        _y_array.append(_normalized_fiber_density)
        print('Offset x (cell diameter):', _offset_x)
        print('Total pairs:', len(_x_array))
        print(compute_lib.correlation(_x_array, _y_array, _with_p_value=True))

        # plot
        _fig = go.Figure(data=go.Scatter(x=_x_array,
                                         y=_y_array,
                                         mode='markers',
                                         marker={
                                             'size': 15,
                                             'color': 'black'
                                         }),
                         layout={
                             'xaxis': {
                                 'title': 'Pair distance (cell diameter)',
                                 'zeroline': False
                             },
                             'yaxis': {
                                 'title': 'Fiber density (z-score)',
                                 'zeroline': False,
                                 'range': [-2.2, 13],
                                 'tickmode': 'array',
                                 'tickvals': [0, 4, 8, 12]
                             },
                             'shapes': [{
                                 'type': 'line',
                                 'x0': 4.5,
                                 'y0': -2,
                                 'x1': 9.5,
                                 'y1': -2,
                                 'line': {
                                     'color': 'black',
                                     'width': 2
                                 }
                             }, {
                                 'type': 'line',
                                 'x0': 4.5,
                                 'y0': -2,
                                 'x1': 4.5,
                                 'y1': 13,
                                 'line': {
                                     'color': 'black',
                                     'width': 2
                                 }
                             }]
                         })
        save.to_html(_fig=_fig,
                     _path=os.path.join(paths.PLOTS, save.get_module_name()),
                     _filename='plot_offset_x_' + str(_offset_x))
def main(_directions=None):
    """Correlate left/right cell fiber-density derivatives per window direction.

    For each cell pair and each direction in ``_directions`` (defaults to
    ['inside', 'outside']), correlates the derivative time series of the two
    cells' fiber densities, prints a Wilcoxon test around zero per derivative
    order, and saves one box plot per direction.

    :param _directions: window directions to analyze; None means
        ['inside', 'outside'].
    """
    if _directions is None:
        _directions = ['inside', 'outside']

    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=False,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False
    )
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    print('Total tuples:', len(_tuples))

    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        # quantify only up to the frame where the two windows would overlap
        _latest_time_frame = compute.latest_time_frame_before_overlapping(_experiment, _series_id, _group, OFFSET_X)
        for _cell_id, _direction in product(['left_cell', 'right_cell'], _directions):
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': _direction,
                'time_points': _latest_time_frame
            })

    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments, _keys=['experiment', 'series_id', 'group', 'cell_id', 'direction'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute, _subtract_border=True)

    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    for _direction in _directions:
        # one correlations array per derivative order
        _y_arrays = [[] for _i in DERIVATIVES]
        for _tuple in tqdm(_tuples, desc='Experiments loop'):
            _experiment, _series_id, _group = _tuple
            # both cells must have windows in this direction
            if (_experiment, _series_id, _group, 'left_cell', _direction) not in _windows_dictionary or \
                    (_experiment, _series_id, _group, 'right_cell', _direction) not in _windows_dictionary:
                continue
            _properties = load.group_properties(_experiment, _series_id, _group)
            _left_cell_fiber_densities = \
                _experiments_fiber_densities[(_experiment, _series_id, _group, 'left_cell', _direction)]
            _right_cell_fiber_densities = \
                _experiments_fiber_densities[(_experiment, _series_id, _group, 'right_cell', _direction)]
            _left_cell_fiber_densities = compute.remove_blacklist(
                _experiment, _series_id, _properties['cells_ids']['left_cell'], _left_cell_fiber_densities)
            _right_cell_fiber_densities = compute.remove_blacklist(
                _experiment, _series_id, _properties['cells_ids']['right_cell'], _right_cell_fiber_densities)
            if not OUT_OF_BOUNDARIES:
                # keep only time frames where both cells are within borders
                _left_cell_fiber_densities, _right_cell_fiber_densities = \
                    compute.longest_same_indices_shared_in_borders_sub_array(
                        _left_cell_fiber_densities, _right_cell_fiber_densities
                    )
            else:
                # keep all frames, dropping the out-of-borders flag
                _left_cell_fiber_densities = [_fiber_density[0] for _fiber_density in _left_cell_fiber_densities]
                _right_cell_fiber_densities = [_fiber_density[0] for _fiber_density in _right_cell_fiber_densities]

            # ignore small arrays
            _minimum_time_frame_for_correlation = compute.minimum_time_frames_for_correlation(_experiment)
            if len(_left_cell_fiber_densities) < _minimum_time_frame_for_correlation or \
                    len(_right_cell_fiber_densities) < _minimum_time_frame_for_correlation:
                continue

            for _derivative_index, _derivative in enumerate(DERIVATIVES):
                _y_arrays[_derivative_index].append(compute_lib.correlation(
                    compute_lib.derivative(_left_cell_fiber_densities, _n=_derivative),
                    compute_lib.derivative(_right_cell_fiber_densities, _n=_derivative)
                ))

        print('Direction:', _direction)
        print('Total pairs:', len(_y_arrays[0]))
        print('Wilcoxon around the zero')
        for _y_array, _derivative in zip(_y_arrays, DERIVATIVES):
            print('Derivative:', _derivative, wilcoxon(_y_array))

        # plot
        _y_title = 'Inner correlation' if _direction == 'inside' else 'Outer correlation'
        _colors_array = config.colors(3)
        _fig = go.Figure(
            data=[
                go.Box(
                    y=_y,
                    name=_derivative,
                    boxpoints='all',
                    jitter=1,
                    pointpos=0,
                    line={
                        'width': 1
                    },
                    fillcolor='white',
                    marker={
                        'size': 10,
                        'color': _color
                    },
                    opacity=0.7,
                    showlegend=False
                ) for _y, _derivative, _color in zip(_y_arrays, DERIVATIVES_TEXT, _colors_array)
            ],
            layout={
                'xaxis': {
                    'title': 'Fiber density derivative',
                    'zeroline': False
                },
                'yaxis': {
                    'title': _y_title,
                    'range': [-1, 1],
                    'zeroline': False,
                    'tickmode': 'array',
                    'tickvals': [-1, -0.5, 0, 0.5, 1]
                }
            }
        )
        save.to_html(
            _fig=_fig,
            _path=os.path.join(paths.PLOTS, save.get_module_name()),
            _filename='plot_direction_' + _direction
        )
def compute_cell_pairs():
    """Collect normalized fiber densities per x-offset for each pair-distance range.

    For every range in PAIR_DISTANCE_RANGES, quantifies fiber density in
    windows at increasing x-offsets between the two cells of each real pair
    (with a band), z-scores the values, and accumulates them per offset.

    :return: (_names_array, _x_array, _y_array) — one entry per distance
        range: a label string, the offsets axis (np.arange), and a list of
        z-scored fiber-density lists (one list per offset).
    """
    _x_array = []
    _y_array = []
    _names_array = []
    for _distances_range in PAIR_DISTANCE_RANGES:
        print('Pair distance range:', str(_distances_range))
        _experiments = all_experiments()
        _experiments = filtering.by_categories(
            _experiments=_experiments,
            _is_single_cell=False,
            _is_high_temporal_resolution=False,
            _is_bleb=False,
            _is_dead_dead=False,
            _is_live_dead=False,
            _is_bead=False,
            _is_metastasis=False)
        _tuples = load.experiments_groups_as_tuples(_experiments)
        _tuples = filtering.by_time_frames_amount(
            _tuples, compute.density_time_frame(_experiments[0]))
        _tuples = filtering.by_real_pairs(_tuples)
        _tuples = filtering.by_pair_distance_range(_tuples, _distances_range)
        _tuples = filtering.by_band(_tuples)
        print('Total tuples:', len(_tuples))

        # the longest offsets axis across tuples is used as the shared x-axis
        _max_offsets_x = []
        _arguments = []
        for _tuple in _tuples:
            _experiment, _series_id, _group = _tuple
            _time_frame = compute.minimum_time_frames_for_correlation(
                _experiment)
            _pair_distance = \
                compute.pair_distance_in_cell_size_time_frame(_experiment, _series_id, _group, _time_frame - 1)
            # offsets limited so the window stays between the two cells
            _offsets_x = np.arange(
                start=0,
                stop=_pair_distance / 2 - 0.5 - QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                step=OFFSET_X_STEP)
            if len(_offsets_x) > len(_max_offsets_x):
                _max_offsets_x = _offsets_x
            for _offset_x in _offsets_x:
                for _cell_id in ['left_cell', 'right_cell']:
                    _arguments.append({
                        'experiment': _experiment,
                        'series_id': _series_id,
                        'group': _group,
                        'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                        'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                        'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                        'offset_x': _offset_x,
                        'offset_y': OFFSET_Y,
                        'offset_z': OFFSET_Z,
                        'cell_id': _cell_id,
                        'direction': 'inside',
                        'time_point': _time_frame - 1
                    })

        _windows_dictionary, _windows_to_compute = \
            compute.windows(_arguments,
                            _keys=['experiment', 'series_id', 'group', 'offset_x', 'cell_id'])
        _fiber_densities = compute.fiber_densities(_windows_to_compute)

        _pair_distance_fiber_densities = [[] for _i in range(len(_max_offsets_x))]
        for _tuple in _tuples:
            _experiment, _series_id, _group = _tuple
            for _offset_x_index, _offset_x in enumerate(_max_offsets_x):
                for _cell_id in ['left_cell', 'right_cell']:
                    # tuples with a shorter offsets axis simply miss the key
                    if (_experiment, _series_id, _group, _offset_x, _cell_id) in _windows_dictionary:
                        _normalization = load.normalization_series_file_data(
                            _experiment, _series_id)
                        _window_tuple = _windows_dictionary[(_experiment, _series_id, _group,
                                                             _offset_x, _cell_id)][0]
                        _fiber_density = _fiber_densities[_window_tuple]
                        # skip out-of-boundaries windows unless explicitly allowed
                        if not OUT_OF_BOUNDARIES and _fiber_density[1]:
                            continue
                        _normalized_fiber_density = compute_lib.z_score(
                            _x=_fiber_density[0],
                            _average=_normalization['average'],
                            _std=_normalization['std'])
                        if not np.isnan(_normalized_fiber_density):
                            _pair_distance_fiber_densities[
                                _offset_x_index].append(
                                    _normalized_fiber_density)

        _x_array.append(_max_offsets_x)
        _y_array.append(_pair_distance_fiber_densities)
        _names_array.append('Pair distance ' + str(_distances_range[0]) + '-' +
                            str(_distances_range[1]))

    return _names_array, _x_array, _y_array
def main(_high_temporal_resolution=True):
    """Compare same-pair vs different-pair correlations at coarser temporal resolutions.

    Down-samples each pair's fiber-density series by every step in
    TIME_FRAMES_STEPS (taking every k-th frame from each possible starting
    phase), correlates derivatives within the same pair and against cells
    from different pairs of the same experiment, and records the signed
    distance of each (same, different) correlation point from the y=x line
    (positive when same > different). Prints Wilcoxon statistics per
    resolution and saves a scatter of mean +/- std vs temporal resolution.

    :param _high_temporal_resolution: selects the experiment category and
        indexes TIME_FRAMES_STEPS / TEMPORAL_RESOLUTION /
        GENERAL_MINIMUM_CORRELATION_TIME_FRAMES.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False
    )
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    print('Total tuples:', len(_tuples))

    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        # quantify only up to the frame where the two windows would overlap
        _latest_time_frame = compute.latest_time_frame_before_overlapping(_experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })

    _windows_dictionary, _windows_to_compute = compute.windows(_arguments,
                                                               _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute, _subtract_border=True)

    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _tuples_by_experiment = organize.by_experiment(_tuples)

    # one array of signed point-to-diagonal distances per down-sampling step
    _y_arrays = [[] for _i in TIME_FRAMES_STEPS[_high_temporal_resolution]]
    _x_array = []
    for _time_frame_index, _time_frame_every in enumerate(TIME_FRAMES_STEPS[_high_temporal_resolution]):
        print('Temporal resolution (minutes):', _time_frame_every * TEMPORAL_RESOLUTION[_high_temporal_resolution])
        _higher_same_counter = 0
        _valid_tuples = []
        # every down-sampling phase contributes points
        for _time_frame_begin in range(_time_frame_every):
            for _experiment in _tuples_by_experiment:
                _experiment_tuples = _tuples_by_experiment[_experiment]
                for _same_index in range(len(_experiment_tuples)):
                    _same_tuple = _experiment_tuples[_same_index]
                    _same_experiment, _same_series, _same_group = _same_tuple
                    _same_left_cell_fiber_densities = \
                        _experiments_fiber_densities[
                            (_same_experiment, _same_series, _same_group, 'left_cell')
                        ]
                    _same_right_cell_fiber_densities = \
                        _experiments_fiber_densities[
                            (_same_experiment, _same_series, _same_group, 'right_cell')
                        ]
                    _same_properties = \
                        load.group_properties(_same_experiment, _same_series, _same_group)
                    _same_left_cell_fiber_densities = compute.remove_blacklist(
                        _same_experiment,
                        _same_series,
                        _same_properties['cells_ids']['left_cell'],
                        _same_left_cell_fiber_densities
                    )
                    _same_right_cell_fiber_densities = compute.remove_blacklist(
                        _same_experiment,
                        _same_series,
                        _same_properties['cells_ids']['right_cell'],
                        _same_right_cell_fiber_densities
                    )
                    _same_left_cell_fiber_densities_filtered, _same_right_cell_fiber_densities_filtered = \
                        compute.longest_same_indices_shared_in_borders_sub_array(
                            _same_left_cell_fiber_densities, _same_right_cell_fiber_densities
                        )

                    # ignore small arrays
                    if len(_same_left_cell_fiber_densities_filtered) < compute.minimum_time_frames_for_correlation(_same_experiment):
                        continue

                    # down-sample: every k-th frame starting at this phase
                    _same_left_cell_fiber_densities_filtered = \
                        _same_left_cell_fiber_densities_filtered[_time_frame_begin::_time_frame_every]
                    _same_right_cell_fiber_densities_filtered = \
                        _same_right_cell_fiber_densities_filtered[_time_frame_begin::_time_frame_every]

                    # secondary ignore
                    if len(_same_left_cell_fiber_densities_filtered) < \
                            GENERAL_MINIMUM_CORRELATION_TIME_FRAMES[_high_temporal_resolution]:
                        continue

                    _same_correlation = compute_lib.correlation(
                        compute_lib.derivative(_same_left_cell_fiber_densities_filtered, _n=DERIVATIVE),
                        compute_lib.derivative(_same_right_cell_fiber_densities_filtered, _n=DERIVATIVE)
                    )
                    for _different_index in range(len(_experiment_tuples)):
                        if _same_index != _different_index:
                            _different_tuple = _experiment_tuples[_different_index]
                            _different_experiment, _different_series, _different_group = \
                                _different_tuple
                            # all four left/right cell combinations across the two pairs
                            for _same_cell_id, _different_cell_id in product(['left_cell', 'right_cell'],
                                                                             ['left_cell', 'right_cell']):
                                _same_fiber_densities = _experiments_fiber_densities[(
                                    _same_experiment,
                                    _same_series,
                                    _same_group,
                                    _same_cell_id
                                )]
                                _different_fiber_densities = _experiments_fiber_densities[(
                                    _different_experiment,
                                    _different_series,
                                    _different_group,
                                    _different_cell_id
                                )]
                                _different_properties = load.group_properties(
                                    _different_experiment, _different_series, _different_group
                                )
                                _same_fiber_densities = compute.remove_blacklist(
                                    _same_experiment,
                                    _same_series,
                                    _same_properties['cells_ids'][_same_cell_id],
                                    _same_fiber_densities
                                )
                                _different_fiber_densities = compute.remove_blacklist(
                                    _different_experiment,
                                    _different_series,
                                    _different_properties['cells_ids'][_different_cell_id],
                                    _different_fiber_densities
                                )
                                _same_fiber_densities_filtered, _different_fiber_densities_filtered = \
                                    compute.longest_same_indices_shared_in_borders_sub_array(
                                        _same_fiber_densities, _different_fiber_densities
                                    )

                                # ignore small arrays
                                if len(_same_fiber_densities_filtered) < compute.minimum_time_frames_for_correlation(_different_experiment):
                                    continue

                                _same_fiber_densities_filtered = \
                                    _same_fiber_densities_filtered[_time_frame_begin::_time_frame_every]
                                _different_fiber_densities_filtered = \
                                    _different_fiber_densities_filtered[_time_frame_begin::_time_frame_every]

                                # secondary ignore
                                if len(_same_fiber_densities_filtered) < \
                                        GENERAL_MINIMUM_CORRELATION_TIME_FRAMES[_high_temporal_resolution]:
                                    continue

                                _different_correlation = compute_lib.correlation(
                                    compute_lib.derivative(_same_fiber_densities_filtered, _n=DERIVATIVE),
                                    compute_lib.derivative(_different_fiber_densities_filtered, _n=DERIVATIVE)
                                )
                                # distance of (same, different) from the y=x diagonal
                                _point_distance = compute_lib.distance_from_a_point_to_a_line(
                                    _line=[-1, -1, 1, 1],
                                    _point=[_same_correlation, _different_correlation]
                                )
                                if _same_correlation > _different_correlation:
                                    _y_arrays[_time_frame_index].append(_point_distance)
                                    _higher_same_counter += 1
                                else:
                                    _y_arrays[_time_frame_index].append(-_point_distance)
                                if _same_tuple not in _valid_tuples:
                                    _valid_tuples.append(_same_tuple)
        print('Total tuples:', len(_valid_tuples))
        print('Total points:', len(_y_arrays[_time_frame_index]))
        print('Wilcoxon around the zero:')
        print(wilcoxon(_y_arrays[_time_frame_index]))
        print('Higher same amount:', _higher_same_counter / len(_y_arrays[_time_frame_index]))
        _x_array.append(_time_frame_every * TEMPORAL_RESOLUTION[_high_temporal_resolution])

    # plot
    _fig = go.Figure(
        data=go.Scatter(
            x=_x_array,
            y=[np.mean(_array) for _array in _y_arrays],
            error_y={
                'type': 'data',
                'array': [np.std(_array) for _array in _y_arrays],
                'thickness': 1,
                'color': '#ea8500'
            },
            mode='markers',
            marker={
                'size': 15,
                'color': '#ea8500'
            },
            showlegend=False
        ),
        layout={
            'xaxis': {
                'title': 'Temporal resolution (minutes)',
                'zeroline': False
            },
            'yaxis': {
                'title': 'Same minus different correlation',
                'range': [-1, 1.1],
                'zeroline': False,
                'tickmode': 'array',
                'tickvals': [-1, -0.5, 0, 0.5, 1]
            }
        }
    )
    save.to_html(
        _fig=_fig,
        _path=os.path.join(paths.PLOTS, save.get_module_name()),
        _filename='plot_high_temporal_res_' + str(_high_temporal_resolution)
    )
def main():
    """Plot fiber density vs distance from the left cell: single cells vs pairs.

    Part 1 (single cells): quantifies z-scored fiber density at each x-offset
    to the left and right of every single cell and averages per cell.
    Part 2 (pairs): quantifies z-scored fiber density at each x-offset from
    the left cell of every real pair (distance PAIR_DISTANCE, with a band),
    limited to offsets that fit between the cells.
    Saves a combined line plot of both series with std error bars.
    """
    print('Single Cell')
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=True,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=False,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_time_frames_amount(
        _tuples, compute.minimum_time_frames_for_correlation(_experiments[0]))
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _time_frame = compute.minimum_time_frames_for_correlation(_experiment)
        for _offset_x, _direction in product(OFFSETS_X, ['left', 'right']):
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': _offset_x,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': 'cell',
                'direction': _direction,
                'time_point': _time_frame - 1
            })
    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments,
                        _keys=['experiment', 'series_id', 'group', 'offset_x', 'direction'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)
    # regroup tuples per single cell id
    _tuples = organize.by_single_cell_id(_tuples)
    _single_cell_fiber_densities = [[] for _i in range(len(OFFSETS_X))]
    for _tuple in _tuples:
        _experiment, _series_id, _cell_id = _tuple
        print('Experiment:', _experiment, 'Series ID:', _series_id, 'Cell ID:',
              _cell_id, sep='\t')
        _offset_index = 0
        _normalization = load.normalization_series_file_data(
            _experiment, _series_id)
        for _offset_x in OFFSETS_X:
            # average over this cell's groups and both directions
            _cell_fiber_densities = []
            for _cell_tuple in _tuples[_tuple]:
                _, _, _group = _cell_tuple
                for _direction in ['left', 'right']:
                    _window_tuple = _windows_dictionary[(_experiment, _series_id, _group,
                                                         _offset_x, _direction)][0]
                    _fiber_density = _fiber_densities[_window_tuple]
                    # skip out-of-boundaries windows unless explicitly allowed
                    if not OUT_OF_BOUNDARIES and _fiber_density[1]:
                        continue
                    _normalized_fiber_density = compute_lib.z_score(
                        _x=_fiber_density[0],
                        _average=_normalization['average'],
                        _std=_normalization['std'])
                    _cell_fiber_densities.append(_normalized_fiber_density)
            if len(_cell_fiber_densities) > 0:
                _single_cell_fiber_densities[_offset_index].append(
                    np.mean(_cell_fiber_densities))
            _offset_index += 1

    print('Pairs')
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=False,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=False,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_time_frames_amount(
        _tuples, compute.minimum_time_frames_for_correlation(_experiments[0]))
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_pair_distance(_tuples, PAIR_DISTANCE)
    _tuples = filtering.by_band(_tuples)
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _time_frame = compute.minimum_time_frames_for_correlation(_experiment)
        for _offset_x in OFFSETS_X:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': _offset_x,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': 'left_cell',
                'direction': 'inside',
                'time_point': _time_frame - 1
            })
    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments,
                        _keys=['experiment', 'series_id', 'group', 'offset_x'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)
    _pairs_fiber_densities = [[] for _i in range(len(OFFSETS_X))]
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        print('Experiment:', _experiment, 'Series ID:', _series_id, 'Group:',
              _group, sep='\t')
        _offset_index = 0
        _normalization = load.normalization_series_file_data(
            _experiment, _series_id)

        # take offsets based on pair distance
        _properties = load.group_properties(_experiment, _series_id, _group)
        _left_cell_coordinates = [
            list(_properties['time_points'][0]['left_cell']
                 ['coordinates'].values())
        ]
        _right_cell_coordinates = [
            list(_properties['time_points'][0]['right_cell']
                 ['coordinates'].values())
        ]
        _pair_distance = compute.pair_distance_in_cell_size(
            _experiment, _series_id, _left_cell_coordinates,
            _right_cell_coordinates)
        _edges_distance = _pair_distance - 1
        _max_x_offset = _edges_distance - QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER
        for _offset_x in OFFSETS_X:
            # stop once the window would pass the right cell
            if _offset_x > _max_x_offset:
                break
            _fiber_density = _fiber_densities[_windows_dictionary[(
                _experiment, _series_id, _group, _offset_x)][0]]
            # NOTE(review): this 'continue' also skips the _offset_index
            # increment below, unlike the single-cell loop above where the
            # index advances every offset — confirm the alignment of
            # _pairs_fiber_densities entries with OFFSETS_X is as intended
            if not OUT_OF_BOUNDARIES and _fiber_density[1]:
                continue
            _normalized_fiber_density = compute_lib.z_score(
                _x=_fiber_density[0],
                _average=_normalization['average'],
                _std=_normalization['std'])
            _pairs_fiber_densities[_offset_index].append(
                _normalized_fiber_density)
            _offset_index += 1

    # plot
    _fig = go.Figure(data=[
        go.Scatter(x=OFFSETS_X,
                   y=[np.mean(_array) for _array in _pairs_fiber_densities],
                   name='Pairs',
                   error_y={
                       'type': 'data',
                       'array':
                       [np.std(_array) for _array in _pairs_fiber_densities],
                       'thickness': 1
                   },
                   mode='lines+markers',
                   line={'dash': 'solid'}),
        go.Scatter(
            x=OFFSETS_X,
            y=[np.mean(_array) for _array in _single_cell_fiber_densities],
            name='Single Cell',
            error_y={
                'type': 'data',
                'array':
                [np.std(_array) for _array in _single_cell_fiber_densities],
                'thickness': 1
            },
            mode='lines+markers',
            line={'dash': 'dash'})
    ],
                     layout={
                         'xaxis_title': 'Distance from left cell (cell size)',
                         'yaxis_title': 'Fiber density (z-score)'
                     })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot_distance_' + str(PAIR_DISTANCE))
def compute_fiber_densities(_band=True, _high_temporal_resolution=True):
    """Compute same-pair and different-pair correlations across time lags.

    For every real pair (optionally band-filtered), shifts the two cells'
    fiber-density series against each other by each lag in TIME_LAGS (both
    signs), correlates derivatives, and records per-lag correlation arrays
    plus a histogram of which lag gave the highest correlation. The same is
    done between cells of different pairs as a control.

    :param _band: forwarded to filtering.by_band.
    :param _high_temporal_resolution: selects the experiment category and
        indexes TIME_LAGS.
    :return: (_same_correlation_vs_time_lag, _same_time_lags_arrays,
        _different_time_lags_arrays, _same_time_lags_highest,
        _different_time_lags_highest).
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples, _band=_band)
    print('Total tuples:', len(_tuples))
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        # quantify only up to the frame where the two windows would overlap
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })
    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute, _subtract_border=True)
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }
    # per-tuple correlation per lag (None where the array was too small)
    _same_correlation_vs_time_lag = {}
    _same_time_lags_arrays = [[] for _i in TIME_LAGS[_high_temporal_resolution]]
    _different_time_lags_arrays = [
        [] for _i in TIME_LAGS[_high_temporal_resolution]
    ]
    # counts of which lag index achieved the highest correlation
    _same_time_lags_highest = [
        0 for _i in TIME_LAGS[_high_temporal_resolution]
    ]
    _different_time_lags_highest = [
        0 for _i in TIME_LAGS[_high_temporal_resolution]
    ]
    _valid_tuples = []
    for _same_index in tqdm(range(len(_tuples)), desc='Main loop'):
        _same_tuple = _tuples[_same_index]
        _same_experiment, _same_series, _same_group = _same_tuple
        _same_left_cell_fiber_densities = \
            _experiments_fiber_densities[
                (_same_experiment, _same_series, _same_group, 'left_cell')
            ]
        _same_right_cell_fiber_densities = \
            _experiments_fiber_densities[
                (_same_experiment, _same_series, _same_group, 'right_cell')
            ]
        _same_properties = \
            load.group_properties(_same_experiment, _same_series, _same_group)
        _same_left_cell_fiber_densities = compute.remove_blacklist(
            _same_experiment, _same_series,
            _same_properties['cells_ids']['left_cell'],
            _same_left_cell_fiber_densities)
        _same_right_cell_fiber_densities = compute.remove_blacklist(
            _same_experiment, _same_series,
            _same_properties['cells_ids']['right_cell'],
            _same_right_cell_fiber_densities)

        # time lag
        _same_highest_correlation = -1.1  # below any possible correlation
        _same_highest_correlation_time_lag_index = 0
        _same_correlation_vs_time_lag[_same_tuple] = []
        for _time_lag_index, _time_lag in enumerate(
                TIME_LAGS[_high_temporal_resolution]):
            # choose either negative or positive lag
            for _symbol in [-1, 1]:
                # if no time lag consider it only once
                if _time_lag == 0 and _symbol == -1:
                    continue
                _time_lag_symbol = _time_lag * _symbol
                # shift the two series against each other by the signed lag
                if _time_lag_symbol > 0:
                    _same_left_cell_fiber_densities_time_lag = _same_left_cell_fiber_densities[:
                                                                                               -_time_lag_symbol]
                    _same_right_cell_fiber_densities_time_lag = _same_right_cell_fiber_densities[
                        _time_lag_symbol:]
                elif _time_lag_symbol < 0:
                    _same_left_cell_fiber_densities_time_lag = _same_left_cell_fiber_densities[
                        -_time_lag_symbol:]
                    _same_right_cell_fiber_densities_time_lag = _same_right_cell_fiber_densities[:
                                                                                                 _time_lag_symbol]
                else:
                    _same_left_cell_fiber_densities_time_lag = _same_left_cell_fiber_densities
                    _same_right_cell_fiber_densities_time_lag = _same_right_cell_fiber_densities
                _same_left_cell_fiber_densities_filtered, _same_right_cell_fiber_densities_filtered = \
                    compute.longest_same_indices_shared_in_borders_sub_array(
                        _same_left_cell_fiber_densities_time_lag,
                        _same_right_cell_fiber_densities_time_lag
                    )

                # ignore small arrays
                if len(_same_left_cell_fiber_densities_filtered
                       ) < compute.minimum_time_frames_for_correlation(
                           _same_experiment):
                    _same_correlation_vs_time_lag[_same_tuple].append(None)
                    continue

                _same_correlation = compute_lib.correlation(
                    compute_lib.derivative(
                        _same_left_cell_fiber_densities_filtered,
                        _n=DERIVATIVE),
                    compute_lib.derivative(
                        _same_right_cell_fiber_densities_filtered,
                        _n=DERIVATIVE))
                _same_time_lags_arrays[_time_lag_index].append(
                    _same_correlation)
                _same_correlation_vs_time_lag[_same_tuple].append(
                    _same_correlation)
                if _same_correlation > _same_highest_correlation:
                    _same_highest_correlation = _same_correlation
                    _same_highest_correlation_time_lag_index = _time_lag_index
        _same_time_lags_highest[_same_highest_correlation_time_lag_index] += 1

        # control: correlate against cells from every other pair
        for _different_index in range(len(_tuples)):
            if _same_index != _different_index:
                _different_tuple = _tuples[_different_index]
                _different_experiment, _different_series, _different_group = \
                    _different_tuple
                # all four left/right cell combinations across the two pairs
                for _same_cell_id, _different_cell_id in product(
                        ['left_cell', 'right_cell'],
                        ['left_cell', 'right_cell']):
                    _same_fiber_densities = _experiments_fiber_densities[(
                        _same_experiment, _same_series, _same_group,
                        _same_cell_id)]
                    _different_fiber_densities = _experiments_fiber_densities[(
                        _different_experiment, _different_series,
                        _different_group, _different_cell_id)]
                    _different_properties = load.group_properties(
                        _different_experiment, _different_series,
                        _different_group)
                    _same_fiber_densities = compute.remove_blacklist(
                        _same_experiment, _same_series,
                        _same_properties['cells_ids'][_same_cell_id],
                        _same_fiber_densities)
                    _different_fiber_densities = compute.remove_blacklist(
                        _different_experiment, _different_series,
                        _different_properties['cells_ids'][_different_cell_id],
                        _different_fiber_densities)

                    # time lag
                    _different_highest_correlation = -1.1
                    _different_highest_correlation_time_lag_index = 0
                    for _time_lag_index, _time_lag in enumerate(
                            TIME_LAGS[_high_temporal_resolution]):
                        # choose either negative or positive lag
                        for _symbol in [-1, 1]:
                            # if no time lag consider it only once
                            if _time_lag == 0 and _symbol == -1:
                                continue
                            _time_lag_symbol = _time_lag * _symbol
                            if _time_lag_symbol > 0:
                                _same_fiber_densities_time_lag = _same_fiber_densities[:
                                                                                       -_time_lag_symbol]
                                _different_fiber_densities_time_lag = _different_fiber_densities[
                                    _time_lag_symbol:]
                            elif _time_lag_symbol < 0:
                                _same_fiber_densities_time_lag = _same_fiber_densities[
                                    -_time_lag_symbol:]
                                _different_fiber_densities_time_lag = _different_fiber_densities[:
                                                                                                 _time_lag_symbol]
                            else:
                                _same_fiber_densities_time_lag = _same_fiber_densities
                                _different_fiber_densities_time_lag = _different_fiber_densities
                            _same_fiber_densities_filtered, _different_fiber_densities_filtered = \
                                compute.longest_same_indices_shared_in_borders_sub_array(
                                    _same_fiber_densities_time_lag,
                                    _different_fiber_densities_time_lag
                                )

                            # ignore small arrays
                            if len(
                                    _same_fiber_densities_filtered
                            ) < compute.minimum_time_frames_for_correlation(
                                    _different_experiment):
                                continue

                            _different_correlation = compute_lib.correlation(
                                compute_lib.derivative(
                                    _same_fiber_densities_filtered,
                                    _n=DERIVATIVE),
                                compute_lib.derivative(
                                    _different_fiber_densities_filtered,
                                    _n=DERIVATIVE))
                            _different_time_lags_arrays[
                                _time_lag_index].append(_different_correlation)
                            if _different_correlation > _different_highest_correlation:
                                _different_highest_correlation = _different_correlation
                                _different_highest_correlation_time_lag_index = _time_lag_index
                            if _same_tuple not in _valid_tuples:
                                _valid_tuples.append(_same_tuple)
                    _different_time_lags_highest[
                        _different_highest_correlation_time_lag_index] += 1
    print('Total tuples:', len(_valid_tuples))
    return _same_correlation_vs_time_lag, _same_time_lags_arrays, _different_time_lags_arrays, \
        _same_time_lags_highest, _different_time_lags_highest
def main(_band=True):
    """Compare in-pair ('same') vs. cross-pair ('different') fiber-density correlations
    before and after bleb injection, then print Wilcoxon statistics and save a box
    plot and a scatter plot.

    :param _band: passed to filtering.by_band to keep only band (or non-band) pairs.
    """
    # NOTE(review): runs over bleb experiments only; temporal resolution is not
    # filtered (_is_high_temporal_resolution=None).
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=False,
                                           _is_high_temporal_resolution=None,
                                           _is_bleb=True,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)

    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(
        _tuples, _distance_range=PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples, _real_pairs=REAL_CELLS)
    _tuples = filtering.by_fake_static_pairs(_tuples, _fake_static_pairs=STATIC)
    _tuples = filtering.by_band(_tuples, _band=_band)
    _tuples = filtering.by_bleb_from_start(_tuples, _from_start=False)
    print('Total tuples:', len(_tuples))

    # build one quantification-window request per (tuple, cell side)
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })

    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute,
                                               _subtract_border=True)

    # per (experiment, series, group, cell): ordered fiber-density time series
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _tuples_by_experiment = organize.by_experiment(_tuples)

    # same (before, after), different (before, after)
    _correlations = [[[], []], [[], []]]
    _valid_real_tuples = []
    for _experiment in _tuples_by_experiment:
        print('Experiment:', _experiment)
        _experiment_tuples = _tuples_by_experiment[_experiment]

        for _same_index in tqdm(range(len(_experiment_tuples)), desc='Main loop'):
            _same_tuple = _experiment_tuples[_same_index]
            _same_experiment, _same_series, _same_group = _same_tuple

            _same_left_cell_fiber_densities = \
                _experiments_fiber_densities[
                    (_same_experiment, _same_series, _same_group, 'left_cell')
                ]
            _same_right_cell_fiber_densities = \
                _experiments_fiber_densities[
                    (_same_experiment, _same_series, _same_group, 'right_cell')
                ]

            _same_properties = \
                load.group_properties(_same_experiment, _same_series, _same_group)
            _same_left_cell_fiber_densities = compute.remove_blacklist(
                _same_experiment,
                _same_series,
                _same_properties['cells_ids']['left_cell'],
                _same_left_cell_fiber_densities)
            _same_right_cell_fiber_densities = compute.remove_blacklist(
                _same_experiment,
                _same_series,
                _same_properties['cells_ids']['right_cell'],
                _same_right_cell_fiber_densities)

            # split each series at the experiment's first post-injection frame
            _same_before_left_cell_fiber_densities = \
                _same_left_cell_fiber_densities[:AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_same_experiment]]
            _same_before_right_cell_fiber_densities = \
                _same_right_cell_fiber_densities[:AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_same_experiment]]
            _same_after_left_cell_fiber_densities = \
                _same_left_cell_fiber_densities[AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_same_experiment]:]
            _same_after_right_cell_fiber_densities = \
                _same_right_cell_fiber_densities[AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_same_experiment]:]

            _same_before_left_cell_fiber_densities_filtered, _same_before_right_cell_fiber_densities_filtered = \
                compute.longest_same_indices_shared_in_borders_sub_array(
                    _same_before_left_cell_fiber_densities, _same_before_right_cell_fiber_densities
                )
            _same_after_left_cell_fiber_densities_filtered, _same_after_right_cell_fiber_densities_filtered = \
                compute.longest_same_indices_shared_in_borders_sub_array(
                    _same_after_left_cell_fiber_densities, _same_after_right_cell_fiber_densities
                )

            # ignore small arrays
            _minimum_time_frame_for_correlation = compute.minimum_time_frames_for_correlation(
                _same_experiment)
            if len(_same_before_left_cell_fiber_densities_filtered) < _minimum_time_frame_for_correlation or \
                    len(_same_after_left_cell_fiber_densities_filtered) < _minimum_time_frame_for_correlation:
                continue

            _same_before_correlation = compute_lib.correlation(
                compute_lib.derivative(
                    _same_before_left_cell_fiber_densities_filtered, _n=DERIVATIVE),
                compute_lib.derivative(
                    _same_before_right_cell_fiber_densities_filtered, _n=DERIVATIVE))
            _same_after_correlation = compute_lib.correlation(
                compute_lib.derivative(
                    _same_after_left_cell_fiber_densities_filtered, _n=DERIVATIVE),
                compute_lib.derivative(
                    _same_after_right_cell_fiber_densities_filtered, _n=DERIVATIVE))

            # pair this tuple's 'same' correlation against every other tuple's
            # cells ('different' pairs) in the same experiment
            for _different_index in range(len(_experiment_tuples)):
                if _same_index != _different_index:
                    _different_tuple = _experiment_tuples[_different_index]
                    _different_experiment, _different_series, _different_group = _different_tuple
                    for _same_cell_id, _different_cell_id in product(
                            ['left_cell', 'right_cell'], ['left_cell', 'right_cell']):
                        _same_fiber_densities = _experiments_fiber_densities[(
                            _same_experiment, _same_series, _same_group, _same_cell_id)]
                        _different_fiber_densities = _experiments_fiber_densities[
                            (_different_experiment, _different_series,
                             _different_group, _different_cell_id)]

                        _different_properties = load.group_properties(
                            _different_experiment, _different_series, _different_group)
                        _same_fiber_densities = compute.remove_blacklist(
                            _same_experiment,
                            _same_series,
                            _same_properties['cells_ids'][_same_cell_id],
                            _same_fiber_densities)
                        _different_fiber_densities = compute.remove_blacklist(
                            _different_experiment,
                            _different_series,
                            _different_properties['cells_ids'][_different_cell_id],
                            _different_fiber_densities)

                        _same_before_fiber_densities = \
                            _same_fiber_densities[:AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_same_experiment]]
                        _same_after_fiber_densities = \
                            _same_fiber_densities[AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_same_experiment]:]
                        _different_before_fiber_densities = \
                            _different_fiber_densities[:AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_different_experiment]]
                        _different_after_fiber_densities = \
                            _different_fiber_densities[AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_different_experiment]:]

                        _same_before_fiber_densities_filtered, _different_before_fiber_densities_filtered = \
                            compute.longest_same_indices_shared_in_borders_sub_array(
                                _same_before_fiber_densities, _different_before_fiber_densities
                            )
                        _same_after_fiber_densities_filtered, _different_after_fiber_densities_filtered = \
                            compute.longest_same_indices_shared_in_borders_sub_array(
                                _same_after_fiber_densities, _different_after_fiber_densities
                            )

                        # ignore small arrays
                        if len(_same_before_fiber_densities_filtered) < _minimum_time_frame_for_correlation or \
                                len(_same_after_fiber_densities_filtered) < _minimum_time_frame_for_correlation:
                            continue

                        _different_before_correlation = compute_lib.correlation(
                            compute_lib.derivative(
                                _same_before_fiber_densities_filtered, _n=DERIVATIVE),
                            compute_lib.derivative(
                                _different_before_fiber_densities_filtered, _n=DERIVATIVE))
                        _different_after_correlation = compute_lib.correlation(
                            compute_lib.derivative(
                                _same_after_fiber_densities_filtered, _n=DERIVATIVE),
                            compute_lib.derivative(
                                _different_after_fiber_densities_filtered, _n=DERIVATIVE))

                        # NOTE(review): the 'same' correlations are appended once per
                        # different-pair combination so every scatter point has a
                        # (same, different) partner — presumably intentional; confirm.
                        _correlations[0][0].append(_same_before_correlation)
                        _correlations[0][1].append(_same_after_correlation)
                        _correlations[1][0].append(_different_before_correlation)
                        _correlations[1][1].append(_different_after_correlation)

                        if _same_tuple not in _valid_real_tuples:
                            _valid_real_tuples.append(_same_tuple)

    print('Total tuples:', len(_valid_real_tuples))

    # signed distance of every (same, different) point from the y = x diagonal:
    # positive when the in-pair correlation exceeds the cross-pair one
    _distances_from_y_equal_x = [[], []]
    _same_correlations, _different_correlations = _correlations
    _same_before_correlations, _same_after_correlations = _same_correlations
    _different_before_correlations, _different_after_correlations = _different_correlations
    for _same_before, _same_after, _different_before, _different_after in \
            zip(_same_before_correlations, _same_after_correlations,
                _different_before_correlations, _different_after_correlations):
        for _group_type_index, _same, _different in \
                zip([0, 1], [_same_before, _same_after], [_different_before, _different_after]):
            _point_distance = compute_lib.distance_from_a_point_to_a_line(
                _line=[-1, -1, 1, 1], _point=[_same, _different])
            if _same > _different:
                _distances_from_y_equal_x[_group_type_index].append(_point_distance)
            else:
                _distances_from_y_equal_x[_group_type_index].append(-_point_distance)

    print('Total points:', len(_distances_from_y_equal_x[0]))
    print('Higher before same amount:',
          (np.array(_distances_from_y_equal_x[0]) > 0).sum() / len(_distances_from_y_equal_x[0]))
    print('Wilcoxon of before points:', wilcoxon(_distances_from_y_equal_x[0]))
    print('Higher after same amount:',
          (np.array(_distances_from_y_equal_x[1]) > 0).sum() / len(_distances_from_y_equal_x[1]))
    print('Wilcoxon of after points:', wilcoxon(_distances_from_y_equal_x[1]))
    _before_minus_after = np.array(_distances_from_y_equal_x[0]) - np.array(
        _distances_from_y_equal_x[1])
    print('Before > after amount:',
          (_before_minus_after > 0).sum() / len(_before_minus_after))
    print('Wilcoxon before & after:',
          wilcoxon(_distances_from_y_equal_x[0], _distances_from_y_equal_x[1]))

    # box plot
    _colors_array = config.colors(2)
    _names_array = ['Before', 'After']
    _fig = go.Figure(data=[
        go.Box(y=_y_array,
               name=_name,
               boxpoints=False,
               line={'width': 1},
               marker={'color': _color},
               showlegend=False)
        for _y_array, _name, _color in zip(
            _distances_from_y_equal_x, _names_array, _colors_array)
    ], layout={
        'xaxis': {
            'zeroline': False
        },
        'yaxis': {
            'title': 'Same minus different correlation',
            'zeroline': False,
            'range': [-1, 1.1],
            'tickmode': 'array',
            'tickvals': [-1, -0.5, 0, 0.5, 1]
        }
    })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot_box')

    # scatter plot
    _fig = go.Figure(data=go.Scatter(x=_distances_from_y_equal_x[0],
                                     y=_distances_from_y_equal_x[1],
                                     mode='markers',
                                     marker={
                                         'size': 5,
                                         'color': '#ea8500'
                                     },
                                     showlegend=False),
                     layout={
                         'xaxis': {
                             'title': 'Before bleb',
                             'zeroline': False,
                             'range': [-1.1, 1.2],
                             'tickmode': 'array',
                             'tickvals': [-1, -0.5, 0, 0.5, 1]
                         },
                         'yaxis': {
                             'title': 'After bleb',
                             'zeroline': False,
                             'range': [-1.1, 1.2],
                             'tickmode': 'array',
                             'tickvals': [-1, -0.5, 0, 0.5, 1]
                         },
                         # axes frame (black) and the y = x reference line (red)
                         'shapes': [{
                             'type': 'line',
                             'x0': -1,
                             'y0': -1,
                             'x1': -1,
                             'y1': 1,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }, {
                             'type': 'line',
                             'x0': -1,
                             'y0': -1,
                             'x1': 1,
                             'y1': -1,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }, {
                             'type': 'line',
                             'x0': -1,
                             'y0': -1,
                             'x1': 1,
                             'y1': 1,
                             'line': {
                                 'color': 'red',
                                 'width': 2
                             }
                         }]
                     })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot')
def main():
    """Sweep quantification windows outward from each cell (offset_x steps) and, at
    each offset, correlate pair distance against the normalized (z-scored) fiber
    density; print a summary Pearson correlation, save a scatter plot, and print a
    tab-separated table of the per-offset results.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=False,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=False,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)

    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_time_frames_amount(
        _tuples, compute.density_time_frame(_experiments[0]))
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)

    # per-tuple offsets depend on pair distance; keep the longest offsets array
    # so the outer results loop can cover every offset any tuple reached
    _max_offsets_x = []
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _time_frame = compute.minimum_time_frames_for_correlation(_experiment)
        _pair_distance = \
            compute.pair_distance_in_cell_size_time_frame(_experiment, _series_id, _group,
                                                          _time_frame=_time_frame - 1)
        # stop before the windows of the two cells would overlap mid-pair
        _offsets_x = \
            np.arange(start=0,
                      stop=_pair_distance / 2 - 0.5 - QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                      step=OFFSET_X_STEP)
        if len(_offsets_x) > len(_max_offsets_x):
            _max_offsets_x = _offsets_x
        for _offset_x in _offsets_x:
            for _cell_id in ['left_cell', 'right_cell']:
                _arguments.append({
                    'experiment': _experiment,
                    'series_id': _series_id,
                    'group': _group,
                    'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                    'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                    'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                    'offset_x': _offset_x,
                    'offset_y': OFFSET_Y,
                    'offset_z': OFFSET_Z,
                    'cell_id': _cell_id,
                    'direction': 'inside',
                    # NOTE(review): key is 'time_point' (singular) here, unlike the
                    # 'time_points' key used elsewhere in this file — confirm that
                    # compute.windows expects the singular form for a single frame.
                    'time_point': _time_frame - 1
                })

    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments,
                        _keys=['experiment', 'series_id', 'group', 'offset_x', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)

    _x_array = []
    _y_array = []
    _n_array = []
    _p_value_array = []
    for _offset_x in _max_offsets_x:
        _pair_distances = []
        _z_scores = []
        for _tuple in _tuples:
            _experiment, _series_id, _group = _tuple
            for _cell_id in ['left_cell', 'right_cell']:
                if (_experiment, _series_id, _group, _offset_x, _cell_id) in _windows_dictionary:
                    _normalization = load.normalization_series_file_data(
                        _experiment, _series_id)
                    _window_tuple = _windows_dictionary[(_experiment, _series_id,
                                                         _group, _offset_x, _cell_id)][0]
                    _fiber_density = _fiber_densities[_window_tuple]

                    # _fiber_density[1] appears to flag out-of-boundaries windows;
                    # skip those unless OUT_OF_BOUNDARIES allows them
                    if not OUT_OF_BOUNDARIES and _fiber_density[1]:
                        continue

                    _normalized_fiber_density = compute_lib.z_score(
                        _x=_fiber_density[0],
                        _average=_normalization['average'],
                        _std=_normalization['std'])

                    if not np.isnan(_normalized_fiber_density):
                        _pair_distances.append(
                            compute.pair_distance_in_cell_size_time_frame(
                                _experiment, _series_id, _group, _time_frame=0))
                        _z_scores.append(_normalized_fiber_density)

        # need more than two samples for a meaningful correlation
        if len(_pair_distances) > 2:
            _x_array.append(round(_offset_x, 1))
            _correlation = compute_lib.correlation(_pair_distances, _z_scores,
                                                   _with_p_value=True)
            _y_array.append(round(_correlation[0], 2))
            _n_array.append(len(_pair_distances))
            _p_value_array.append(round(_correlation[1], 2))

    print('Pearson:')
    print(compute_lib.correlation(_x_array, _y_array, _with_p_value=True))

    # plot
    # red '*' markers below significant points (p < 0.05)
    _significant_x_array = [
        _x for _x, _p_value in zip(_x_array, _p_value_array) if _p_value < 0.05
    ]
    _fig = go.Figure(data=[
        go.Scatter(x=_x_array,
                   y=_y_array,
                   mode='markers',
                   marker={
                       'size': 15,
                       'color': 'black'
                   },
                   showlegend=False),
        go.Scatter(x=_significant_x_array,
                   y=[-0.79] * len(_significant_x_array),
                   mode='text',
                   text='*',
                   textfont={'color': 'red'},
                   showlegend=False)
    ], layout={
        'xaxis': {
            'title': 'Window distance (cell diameter)',
            'zeroline': False
        },
        'yaxis': {
            'title': 'Correlation:<br>pair distance vs. fiber density',
            'zeroline': False,
            'range': [-0.82, 0.3],
            'tickmode': 'array',
            'tickvals': [-0.75, -0.25, 0.25]
        },
        # manually drawn axes frame
        'shapes': [{
            'type': 'line',
            'x0': -0.2,
            'y0': -0.8,
            'x1': 3.2,
            'y1': -0.8,
            'line': {
                'color': 'black',
                'width': 2
            }
        }, {
            'type': 'line',
            'x0': -0.2,
            'y0': -0.8,
            'x1': -0.2,
            'y1': 0.3,
            'line': {
                'color': 'black',
                'width': 2
            }
        }]
    })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot')

    # table
    print('Window distance (cell diameter)',
          'Correlation: pair distance vs. fiber density', 'N', 'P-value', sep='\t')
    for _x, _y, _n, _p_value in zip(_x_array, _y_array, _n_array, _p_value_array):
        print(_x, _y, _n, _p_value, sep='\t')
def main(_band=True):
    """For each real cell pair in bleb experiments, correlate the two cells'
    fiber-density derivatives before vs. after bleb injection, print Wilcoxon
    statistics on the before-minus-after differences, and save a scatter plot.

    :param _band: passed to filtering.by_band to keep only band (or non-band) pairs.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=None,
        _is_bleb=True,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False
    )

    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, _distance_range=PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples, _real_pairs=REAL_CELLS)
    _tuples = filtering.by_fake_static_pairs(_tuples, _fake_static_pairs=STATIC)
    _tuples = filtering.by_band(_tuples, _band=_band)
    _tuples = filtering.by_bleb_from_start(_tuples, _from_start=False)
    print('Total tuples:', len(_tuples))

    # build one quantification-window request per (tuple, cell side)
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(_experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })

    _windows_dictionary, _windows_to_compute = compute.windows(_arguments,
                                                               _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute, _subtract_border=True)

    # per (experiment, series, group, cell): ordered fiber-density time series
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _n_pairs = 0
    _before_correlations = []
    _after_correlations = []
    for _tuple in tqdm(_tuples, desc='Experiments loop'):
        _experiment, _series_id, _group = _tuple
        _left_cell_fiber_densities = \
            _experiments_fiber_densities[(_experiment, _series_id, _group, 'left_cell')]
        _right_cell_fiber_densities = \
            _experiments_fiber_densities[(_experiment, _series_id, _group, 'right_cell')]

        _properties = load.group_properties(_experiment, _series_id, _group)
        _left_cell_fiber_densities = compute.remove_blacklist(
            _experiment,
            _series_id,
            _properties['cells_ids']['left_cell'],
            _left_cell_fiber_densities)
        _right_cell_fiber_densities = compute.remove_blacklist(
            _experiment,
            _series_id,
            _properties['cells_ids']['right_cell'],
            _right_cell_fiber_densities)

        # split each series at the experiment's first post-injection frame
        _before_left_cell_fiber_densities = \
            _left_cell_fiber_densities[:AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_experiment]]
        _before_right_cell_fiber_densities = \
            _right_cell_fiber_densities[:AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_experiment]]
        _after_left_cell_fiber_densities = \
            _left_cell_fiber_densities[AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_experiment]:]
        _after_right_cell_fiber_densities = \
            _right_cell_fiber_densities[AFTER_BLEB_INJECTION_FIRST_TIME_FRAME[_experiment]:]

        _before_left_cell_fiber_densities_filtered, _before_right_cell_fiber_densities_filtered = \
            compute.longest_same_indices_shared_in_borders_sub_array(
                _before_left_cell_fiber_densities, _before_right_cell_fiber_densities)
        _after_left_cell_fiber_densities_filtered, _after_right_cell_fiber_densities_filtered = \
            compute.longest_same_indices_shared_in_borders_sub_array(
                _after_left_cell_fiber_densities, _after_right_cell_fiber_densities)

        # ignore small arrays
        _minimum_time_frame_for_correlation = compute.minimum_time_frames_for_correlation(_experiment)
        if len(_before_left_cell_fiber_densities_filtered) < _minimum_time_frame_for_correlation or \
                len(_after_left_cell_fiber_densities_filtered) < _minimum_time_frame_for_correlation:
            continue

        _n_pairs += 1

        # correlate the derivatives of the two cells' series, before and after
        _before_correlations.append(compute_lib.correlation(
            compute_lib.derivative(_before_left_cell_fiber_densities_filtered, _n=DERIVATIVE),
            compute_lib.derivative(_before_right_cell_fiber_densities_filtered, _n=DERIVATIVE)
        ))
        _after_correlations.append(compute_lib.correlation(
            compute_lib.derivative(_after_left_cell_fiber_densities_filtered, _n=DERIVATIVE),
            compute_lib.derivative(_after_right_cell_fiber_densities_filtered, _n=DERIVATIVE)
        ))

    print('Total pairs:', _n_pairs)
    _before_minus_after = np.array(_before_correlations) - np.array(_after_correlations)
    print('Wilcoxon of before minus after around the zero:')
    print(wilcoxon(_before_minus_after))
    print('Higher before amount:', (_before_minus_after > 0).sum() / len(_before_minus_after))

    # plot
    _fig = go.Figure(
        data=go.Scatter(
            x=_before_correlations,
            y=_after_correlations,
            mode='markers',
            marker={
                'size': 5,
                'color': '#ea8500'
            },
            showlegend=False
        ),
        layout={
            'xaxis': {
                'title': 'Correlation before bleb',
                'zeroline': False,
                'range': [-1.1, 1.2],
                'tickmode': 'array',
                'tickvals': [-1, -0.5, 0, 0.5, 1]
            },
            'yaxis': {
                'title': 'Correlation after bleb',
                'zeroline': False,
                'range': [-1.1, 1.2],
                'tickmode': 'array',
                'tickvals': [-1, -0.5, 0, 0.5, 1]
            },
            # axes frame (black) and the y = x reference line (red)
            'shapes': [
                {
                    'type': 'line',
                    'x0': -1,
                    'y0': -1,
                    'x1': -1,
                    'y1': 1,
                    'line': {
                        'color': 'black',
                        'width': 2
                    }
                },
                {
                    'type': 'line',
                    'x0': -1,
                    'y0': -1,
                    'x1': 1,
                    'y1': -1,
                    'line': {
                        'color': 'black',
                        'width': 2
                    }
                },
                {
                    'type': 'line',
                    'x0': -1,
                    'y0': -1,
                    'x1': 1,
                    'y1': 1,
                    'line': {
                        'color': 'red',
                        'width': 2
                    }
                }
            ]
        }
    )
    save.to_html(
        _fig=_fig,
        _path=os.path.join(paths.PLOTS, save.get_module_name()),
        _filename='plot'
    )
def main():
    """Test the stationarity of per-cell fiber-density series at several derivative
    orders, using both the KPSS and ADF tests; print the percentage of stationary
    series per derivative and save one box plot of p-values per test.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=False,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=False,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)

    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_time_frames_amount(
        _tuples, compute.density_time_frame(_experiments[0]))
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    print('Total tuples:', len(_tuples))

    # build one quantification-window request per (tuple, cell side)
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })

    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute, _subtract_border=True)

    # per (experiment, series, group, cell): ordered fiber-density time series
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    # one p-value list per derivative order, for each test
    _kpss_y_arrays = [[] for _i in DERIVATIVES]
    _adf_y_arrays = [[] for _i in DERIVATIVES]
    for _tuple in tqdm(_tuples, desc='Experiments loop'):
        _experiment, _series_id, _group = _tuple
        _properties = load.group_properties(_experiment, _series_id, _group)

        _left_cell_fiber_densities = _experiments_fiber_densities[(
            _experiment, _series_id, _group, 'left_cell')]
        _left_cell_fiber_densities = compute.remove_blacklist(
            _experiment,
            _series_id,
            _properties['cells_ids']['left_cell'],
            _left_cell_fiber_densities)
        _right_cell_fiber_densities = _experiments_fiber_densities[(
            _experiment, _series_id, _group, 'right_cell')]
        _right_cell_fiber_densities = compute.remove_blacklist(
            _experiment,
            _series_id,
            _properties['cells_ids']['right_cell'],
            _right_cell_fiber_densities)

        if not OUT_OF_BOUNDARIES:
            # keep only the longest in-bounds ascending stretch of each series
            _left_cell_fiber_densities = \
                compute.longest_fiber_densities_ascending_sequence(_left_cell_fiber_densities)
            _right_cell_fiber_densities = \
                compute.longest_fiber_densities_ascending_sequence(_right_cell_fiber_densities)
        else:
            # strip the flag component, keep the raw density values
            _left_cell_fiber_densities = [
                _fiber_density[0] for _fiber_density in _left_cell_fiber_densities
            ]
            _right_cell_fiber_densities = [
                _fiber_density[0] for _fiber_density in _right_cell_fiber_densities
            ]

        # ignore small arrays
        _minimum_time_frames_for_correlation = compute.minimum_time_frames_for_correlation(
            _experiment)
        if len(_left_cell_fiber_densities) < _minimum_time_frames_for_correlation or \
                len(_right_cell_fiber_densities) < _minimum_time_frames_for_correlation:
            continue

        for _derivative_index, _derivative in enumerate(DERIVATIVES):
            for _cell_fiber_densities in [
                    _left_cell_fiber_densities, _right_cell_fiber_densities
            ]:
                _cell_fiber_densities_derivative = compute_lib.derivative(
                    _cell_fiber_densities, _n=_derivative)
                _, _kpss_p_value, _, _ = kpss(_cell_fiber_densities_derivative,
                                              nlags='legacy')
                _kpss_y_arrays[_derivative_index].append(_kpss_p_value)
                _, _adf_p_value, _, _, _, _ = adfuller(
                    _cell_fiber_densities_derivative)
                _adf_y_arrays[_derivative_index].append(_adf_p_value)

    # two cells per pair were appended, hence the division by 2
    print('Total pairs:', len(_kpss_y_arrays[0]) / 2)

    # print results
    # KPSS null hypothesis is stationarity -> p > 0.05 means "stationary";
    # ADF null hypothesis is a unit root -> p < 0.05 means "stationary"
    print('KPSS:')
    for _derivative_index, _derivative in enumerate(DERIVATIVES):
        _stationary_count = len([
            _value for _value in _kpss_y_arrays[_derivative_index] if _value > 0.05
        ])
        print(
            'Derivative:', _derivative, 'Stationary:',
            str(_stationary_count / len(_kpss_y_arrays[_derivative_index]) * 100) + '%')
    print('ADF:')
    for _derivative_index, _derivative in enumerate(DERIVATIVES):
        _stationary_count = len([
            _value for _value in _adf_y_arrays[_derivative_index] if _value < 0.05
        ])
        print(
            'Derivative:', _derivative, 'Stationary:',
            str(_stationary_count / len(_adf_y_arrays[_derivative_index]) * 100) + '%')

    # plot
    _colors_array = config.colors(3)
    for _test_name, _y_title, _y_tickvals, _p_value_line, _y_arrays in \
            zip(
                ['kpss', 'adf'],
                ['KPSS test p-value', 'ADF test p-value'],
                [[0.05, 0.1], [0.05, 1]],
                [0.05, 0.05],
                [_kpss_y_arrays, _adf_y_arrays]
            ):
        _fig = go.Figure(data=[
            go.Box(y=_y,
                   name=_derivative,
                   boxpoints='all',
                   jitter=1,
                   pointpos=0,
                   line={'width': 1},
                   fillcolor='white',
                   marker={
                       'size': 10,
                       'color': _color
                   },
                   opacity=0.7,
                   showlegend=False)
            for _y, _derivative, _color in zip(
                _y_arrays, DERIVATIVES_TEXT, _colors_array)
        ], layout={
            'xaxis': {
                'title': 'Fiber density derivative',
                'zeroline': False
            },
            'yaxis': {
                'title': _y_title,
                'zeroline': False,
                'tickmode': 'array',
                'tickvals': _y_tickvals
            },
            # dashed red line at the significance threshold
            'shapes': [{
                'type': 'line',
                'x0': DERIVATIVES[0] - 0.75,
                'y0': _p_value_line,
                'x1': DERIVATIVES[-1] + 0.75,
                'y1': _p_value_line,
                'line': {
                    'color': 'red',
                    'width': 2,
                    'dash': 'dash'
                }
            }]
        })
        save.to_html(_fig=_fig,
                     _path=os.path.join(paths.PLOTS, save.get_module_name()),
                     _filename='plot_' + _test_name)
def main(_high_temporal_resolution=True):
    """Compare in-pair ('same network') vs. cross-pair ('different network')
    correlations of z-score-aligned fiber-density derivatives, print Wilcoxon
    statistics, and save a scatter plot.

    :param _high_temporal_resolution: selects the temporal-resolution category of
        the experiments and is appended to the output plot filename.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)

    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    print('Total tuples:', len(_tuples))

    # build window requests; when the alignment offset differs from the main
    # offset, request a second window per cell at ALIGNMENT_OFFSET_Y so that
    # align_by_z_score has its reference series
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        for _cell_id in ['left_cell', 'right_cell']:
            _latest_time_frame = compute.latest_time_frame_before_overlapping(
                _experiment, _series_id, _group, OFFSET_X)
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })
            if ALIGNMENT_OFFSET_Y != OFFSET_Y:
                _arguments.append({
                    'experiment': _experiment,
                    'series_id': _series_id,
                    'group': _group,
                    'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                    'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                    'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                    'offset_x': OFFSET_X,
                    'offset_y': ALIGNMENT_OFFSET_Y,
                    'offset_z': OFFSET_Z,
                    'cell_id': _cell_id,
                    'direction': 'inside',
                    'time_points': _latest_time_frame
                })

    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments,
                        _keys=['experiment', 'series_id', 'group', 'cell_id', 'offset_y'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)

    # per (experiment, series, group, cell): ordered fiber-density time series
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }
    _experiments_fiber_densities_aligned = align_by_z_score(
        _tuples, _experiments_fiber_densities)

    _tuples_by_experiment = organize.by_experiment(_tuples)

    _same_correlations_array = []
    _different_correlations_array = []
    _valid_tuples = []
    for _experiment in _tuples_by_experiment:
        print('Experiment:', _experiment)
        _experiment_tuples = _tuples_by_experiment[_experiment]

        for _same_index in tqdm(range(len(_experiment_tuples)), desc='Main loop'):
            _same_tuple = _experiment_tuples[_same_index]
            _same_experiment, _same_series, _same_group = _same_tuple

            _same_left_cell_fiber_densities = \
                _experiments_fiber_densities_aligned[
                    (_same_experiment, _same_series, _same_group, 'left_cell')
                ]
            _same_right_cell_fiber_densities = \
                _experiments_fiber_densities_aligned[
                    (_same_experiment, _same_series, _same_group, 'right_cell')
                ]

            _same_properties = \
                load.group_properties(_same_experiment, _same_series, _same_group)
            _same_left_cell_fiber_densities = compute.remove_blacklist(
                _same_experiment,
                _same_series,
                _same_properties['cells_ids']['left_cell'],
                _same_left_cell_fiber_densities)
            _same_right_cell_fiber_densities = compute.remove_blacklist(
                _same_experiment,
                _same_series,
                _same_properties['cells_ids']['right_cell'],
                _same_right_cell_fiber_densities)

            _same_left_cell_fiber_densities_filtered, _same_right_cell_fiber_densities_filtered = \
                compute.longest_same_indices_shared_in_borders_sub_array(
                    _same_left_cell_fiber_densities, _same_right_cell_fiber_densities
                )

            # ignore small arrays
            if len(_same_left_cell_fiber_densities_filtered
                   ) < compute.minimum_time_frames_for_correlation(
                       _same_experiment):
                continue

            _same_correlation = compute_lib.correlation(
                compute_lib.derivative(
                    _same_left_cell_fiber_densities_filtered, _n=DERIVATIVE),
                compute_lib.derivative(
                    _same_right_cell_fiber_densities_filtered, _n=DERIVATIVE))

            # pair this tuple's 'same' correlation against every other tuple's
            # cells ('different' pairs) in the same experiment
            for _different_index in range(len(_experiment_tuples)):
                if _same_index != _different_index:
                    _different_tuple = _experiment_tuples[_different_index]
                    _different_experiment, _different_series, _different_group = \
                        _different_tuple
                    for _same_cell_id, _different_cell_id in product(
                            ['left_cell', 'right_cell'], ['left_cell', 'right_cell']):
                        _same_fiber_densities = _experiments_fiber_densities_aligned[
                            (_same_experiment, _same_series, _same_group,
                             _same_cell_id)]
                        _different_fiber_densities = _experiments_fiber_densities_aligned[
                            (_different_experiment, _different_series,
                             _different_group, _different_cell_id)]

                        _different_properties = load.group_properties(
                            _different_experiment, _different_series, _different_group)
                        _same_fiber_densities = compute.remove_blacklist(
                            _same_experiment,
                            _same_series,
                            _same_properties['cells_ids'][_same_cell_id],
                            _same_fiber_densities)
                        _different_fiber_densities = compute.remove_blacklist(
                            _different_experiment,
                            _different_series,
                            _different_properties['cells_ids'][_different_cell_id],
                            _different_fiber_densities)

                        _same_fiber_densities_filtered, _different_fiber_densities_filtered = \
                            compute.longest_same_indices_shared_in_borders_sub_array(
                                _same_fiber_densities, _different_fiber_densities
                            )

                        # ignore small arrays
                        if len(_same_fiber_densities_filtered
                               ) < compute.minimum_time_frames_for_correlation(
                                   _different_experiment):
                            continue

                        _different_correlation = compute_lib.correlation(
                            compute_lib.derivative(
                                _same_fiber_densities_filtered, _n=DERIVATIVE),
                            compute_lib.derivative(
                                _different_fiber_densities_filtered, _n=DERIVATIVE))

                        _same_correlations_array.append(_same_correlation)
                        _different_correlations_array.append(_different_correlation)

                        if _same_tuple not in _valid_tuples:
                            _valid_tuples.append(_same_tuple)

    print('Total tuples:', len(_valid_tuples))
    print('Total points:', len(_same_correlations_array))
    _same_minus_different = \
        np.array(_same_correlations_array) - np.array(_different_correlations_array)
    print('Wilcoxon of same minus different around the zero:')
    print(wilcoxon(_same_minus_different))
    print('Higher same amount:',
          (_same_minus_different > 0).sum() / len(_same_minus_different))

    # plot
    _fig = go.Figure(data=go.Scatter(x=_same_correlations_array,
                                     y=_different_correlations_array,
                                     mode='markers',
                                     marker={
                                         'size': 5,
                                         'color': '#ea8500'
                                     },
                                     showlegend=False),
                     layout={
                         'xaxis': {
                             'title': 'Same network correlation',
                             'zeroline': False,
                             'range': [-1.1, 1.2],
                             'tickmode': 'array',
                             'tickvals': [-1, -0.5, 0, 0.5, 1]
                         },
                         'yaxis': {
                             'title': 'Different network correlation',
                             'zeroline': False,
                             'range': [-1.1, 1.2],
                             'tickmode': 'array',
                             'tickvals': [-1, -0.5, 0, 0.5, 1]
                         },
                         # axes frame (black) and the y = x reference line (red)
                         'shapes': [{
                             'type': 'line',
                             'x0': -1,
                             'y0': -1,
                             'x1': -1,
                             'y1': 1,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }, {
                             'type': 'line',
                             'x0': -1,
                             'y0': -1,
                             'x1': 1,
                             'y1': -1,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }, {
                             'type': 'line',
                             'x0': -1,
                             'y0': -1,
                             'x1': 1,
                             'y1': 1,
                             'line': {
                                 'color': 'red',
                                 'width': 2
                             }
                         }]
                     })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot_high_time_' + str(_high_temporal_resolution))
def main(_real_cells=True,
         _static=False,
         _dead_dead=False,
         _live_dead=False,
         _dead=False,
         _live=False,
         _bead=False,
         _metastasis=False,
         _bleb=False,
         _bleb_amount_um=None,
         _band=True,
         _high_temporal_resolution=False,
         _offset_y=0.5):
    """Matchmaking analysis: rank each cell's correct partner among random candidates.

    For every cell in a pair, correlates its fiber-density derivative with its true
    partner and with up to POTENTIAL_MATCHES randomly sampled candidate cells from the
    same experiment, then records the rank of the true partner's correlation.  Prints
    summary statistics and saves two bar charts (observed rank fractions and the
    expected fractions under random matching) via ``save.to_html``.

    The many boolean arguments select experiment categories via ``filtering``;
    ``_offset_y`` positions the quantification window along the y axis.
    """
    # select experiments and pair tuples according to the requested categories
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=_bleb,
        _is_dead_dead=_dead_dead,
        _is_live_dead=_live_dead,
        _is_bead=_bead,
        _is_metastasis=_metastasis)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples, _real_pairs=_real_cells)
    _tuples = filtering.by_fake_static_pairs(_tuples, _fake_static_pairs=_static)
    if _dead_dead is not False or _live_dead is not False:
        _tuples = filtering.by_dead_live(_tuples, _dead=_dead, _live=_live)
    _tuples = filtering.by_band(_tuples, _band=_band)
    if _bleb:
        _tuples = filtering.by_bleb_amount_um(_tuples, _amount_um=_bleb_amount_um)
    print('Total tuples:', len(_tuples))

    # build one quantification-window request per cell, up to the last
    # time frame before the two windows would overlap
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': _offset_y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })

    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute,
                                               _subtract_border=True)

    # per-cell time series of fiber densities, keyed by (experiment, series, group, cell)
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _tuples_by_experiment = organize.by_experiment(_tuples)

    _n = 0
    _cells_ranks = []
    for _experiment in _tuples_by_experiment:
        print('Experiment:', _experiment)
        _experiment_tuples = _tuples_by_experiment[_experiment]
        for _pivot_tuple in tqdm(_experiment_tuples, desc='Main loop'):
            _pivot_experiment, _pivot_series_id, _pivot_group = _pivot_tuple
            _pivot_experiment_properties = load.group_properties(
                _pivot_experiment, _pivot_series_id, _pivot_group)
            # each cell of the pair acts once as pivot; its partner is the correct match
            for _pivot_cell_id, _pivot_cell_correct_match_cell_id in \
                    zip(['left_cell', 'right_cell'], ['right_cell', 'left_cell']):
                _pivot_cell = (_pivot_experiment, _pivot_series_id, _pivot_group,
                               _pivot_cell_id)
                _pivot_cell_correct_match_cell = (
                    _pivot_experiment, _pivot_series_id, _pivot_group,
                    _pivot_cell_correct_match_cell_id)
                _pivot_cell_fiber_densities = _experiments_fiber_densities[
                    _pivot_cell]
                _pivot_cell_fiber_densities = compute.remove_blacklist(
                    _pivot_experiment, _pivot_series_id,
                    _pivot_experiment_properties['cells_ids'][_pivot_cell_id],
                    _pivot_cell_fiber_densities)

                _pivot_cell_correlations = []

                # correct match
                _pivot_cell_correct_match_fiber_densities = _experiments_fiber_densities[
                    _pivot_cell_correct_match_cell]
                _pivot_cell_correct_match_fiber_densities = compute.remove_blacklist(
                    _pivot_experiment, _pivot_series_id,
                    _pivot_experiment_properties['cells_ids']
                    [_pivot_cell_correct_match_cell_id],
                    _pivot_cell_correct_match_fiber_densities)
                _pivot_cell_fiber_densities_filtered, _pivot_cell_correct_match_fiber_densities_filtered = \
                    compute.longest_same_indices_shared_in_borders_sub_array(
                        _pivot_cell_fiber_densities,
                        _pivot_cell_correct_match_fiber_densities
                    )
                # ignore small arrays
                if len(_pivot_cell_fiber_densities_filtered
                       ) < compute.minimum_time_frames_for_correlation(
                           _pivot_experiment):
                    continue
                _correlation = compute_lib.correlation(
                    compute_lib.derivative(
                        _pivot_cell_fiber_densities_filtered, _n=DERIVATIVE),
                    compute_lib.derivative(
                        _pivot_cell_correct_match_fiber_densities_filtered,
                        _n=DERIVATIVE))
                _pivot_cell_correlations.append(_correlation)
                _pivot_cell_correct_match_correlation = _correlation

                # create list of potential cells
                _candidate_tuples = []
                for _candidate_tuple in _experiment_tuples:
                    _candidate_experiment, _candidate_series_id, _candidate_group = _candidate_tuple
                    for _candidate_cell_id in ['left_cell', 'right_cell']:
                        _candidate_cell = (_candidate_experiment,
                                           _candidate_series_id,
                                           _candidate_group, _candidate_cell_id)
                        # skip if same cell or correct match
                        if _candidate_cell == _pivot_cell or _candidate_cell == _pivot_cell_correct_match_cell:
                            continue
                        _candidate_tuples.append(_candidate_cell)

                # compare with each potential candidate, until reached the maximum or nothing to compare with
                # NOTE(review): valid candidates are never removed after use, so
                # random.choice can sample the same candidate more than once
                # (sampling with replacement) — confirm this is intended.
                while len(_pivot_cell_correlations
                          ) < POTENTIAL_MATCHES and len(_candidate_tuples) > 0:
                    # sample randomly
                    _candidate_cell = random.choice(_candidate_tuples)
                    _candidate_experiment, _candidate_series_id, _candidate_group, _candidate_cell_id = _candidate_cell
                    _candidate_experiment_properties = load.group_properties(
                        _candidate_experiment, _candidate_series_id,
                        _candidate_group)
                    _candidate_cell_fiber_densities = _experiments_fiber_densities[
                        _candidate_cell]
                    _candidate_cell_fiber_densities = compute.remove_blacklist(
                        _candidate_experiment, _candidate_series_id,
                        _candidate_experiment_properties['cells_ids']
                        [_candidate_cell_id], _candidate_cell_fiber_densities)
                    _pivot_cell_fiber_densities_filtered, _candidate_cell_fiber_densities_filtered = \
                        compute.longest_same_indices_shared_in_borders_sub_array(
                            _pivot_cell_fiber_densities,
                            _candidate_cell_fiber_densities
                        )
                    # ignore small arrays
                    if len(_pivot_cell_fiber_densities_filtered
                           ) < compute.minimum_time_frames_for_correlation(
                               _pivot_experiment):
                        _candidate_tuples.remove(_candidate_cell)
                        continue
                    _correlation = compute_lib.correlation(
                        compute_lib.derivative(
                            _pivot_cell_fiber_densities_filtered, _n=DERIVATIVE),
                        compute_lib.derivative(
                            _candidate_cell_fiber_densities_filtered,
                            _n=DERIVATIVE))
                    _pivot_cell_correlations.append(_correlation)

                # nothing to compare with
                if len(_pivot_cell_correlations) == 1:
                    continue

                # check matchmaking: rank of the correct match among all
                # correlations, sorted descending (1 = best).  Relies on float
                # equality to re-find the correct match's own correlation value.
                _pivot_cell_correct_match_rank = 1
                for _potential_match_correlation in sorted(
                        _pivot_cell_correlations, reverse=True):
                    if _pivot_cell_correct_match_correlation == _potential_match_correlation:
                        break
                    _pivot_cell_correct_match_rank += 1

                _n += 1
                _cells_ranks.append(_pivot_cell_correct_match_rank)

    # results
    # NOTE(review): if no cell passed the filters, _n == 0 and the division
    # below raises ZeroDivisionError — confirm inputs always yield data.
    _correct_match_probability = 1 / POTENTIAL_MATCHES
    _first_place_correct_matches = sum(
        [1 for _rank in _cells_ranks if _rank == 1])
    _first_place_fraction = _first_place_correct_matches / _n
    print('Matchmaking results:')
    print('Total cells:', _n)
    print('Correct match probability:', round(_correct_match_probability, 2))
    print('Fraction of first place correct matches:',
          round(_first_place_fraction, 2))

    # plot: observed distribution of correct-match ranks, last bin is "MAX_RANK+"
    _x = list(range(MAX_RANK))
    _x_text = [str(_rank + 1) for _rank in _x[:-1]] + [str(MAX_RANK) + '+']
    _ranks_sums = [0 for _rank in _x]
    for _rank in _cells_ranks:
        if _rank < MAX_RANK:
            _ranks_sums[_rank - 1] += 1
        else:
            _ranks_sums[-1] += 1
    _y = np.array(_ranks_sums) / _n
    _fig = go.Figure(data=go.Bar(x=_x, y=_y, marker={'color': '#ea8500'}),
                     layout={
                         'xaxis': {
                             'title': 'Correct match correlation rank',
                             'zeroline': False,
                             'tickmode': 'array',
                             'tickvals': _x,
                             'ticktext': _x_text
                         },
                         'yaxis': {
                             'title': 'Fraction',
                             'range': [0, 1.1],
                             'zeroline': False,
                             'tickmode': 'array',
                             'tickvals': [0, 0.5, 1]
                         }
                     })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot_real_' + str(_real_cells) + '_static_' +
                 str(_static) + '_dead_dead_' + str(_dead_dead) +
                 '_live_dead_' + str(_live_dead) + '_dead_' + str(_dead) +
                 '_live_' + str(_live) + '_bead_' + str(_bead) +
                 '_metastasis_' + str(_metastasis) + '_bleb_' + str(_bleb) +
                 str(_bleb_amount_um) + '_band_' + str(_band) +
                 '_high_time_' + str(_high_temporal_resolution) + '_y_' +
                 str(_offset_y))

    # correct match probability plot: expected rank fractions under uniform
    # random matching (the last bin aggregates ranks MAX_RANK..POTENTIAL_MATCHES)
    _y = [_correct_match_probability] * (MAX_RANK - 1) + [
        _correct_match_probability * (POTENTIAL_MATCHES - MAX_RANK + 1)
    ]
    _fig = go.Figure(data=go.Bar(x=_x, y=_y, marker={'color': '#ea8500'}),
                     layout={
                         'xaxis': {
                             'title': 'Correct match correlation rank',
                             'zeroline': False,
                             'tickmode': 'array',
                             'tickvals': _x,
                             'ticktext': _x_text
                         },
                         'yaxis': {
                             'title': 'Fraction',
                             'range': [0, 1.1],
                             'zeroline': False,
                             'tickmode': 'array',
                             'tickvals': [0, 0.5, 1]
                         }
                     })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot_real_' + str(_real_cells) + '_static_' +
                 str(_static) + '_dead_dead_' + str(_dead_dead) +
                 '_live_dead_' + str(_live_dead) + '_dead_' + str(_dead) +
                 '_live_' + str(_live) + '_bead_' + str(_bead) +
                 '_metastasis_' + str(_metastasis) + '_bleb_' + str(_bleb) +
                 str(_bleb_amount_um) + '_band_' + str(_band) +
                 '_high_time_' + str(_high_temporal_resolution) + '_y_' +
                 str(_offset_y) + '_correct_match_prob')
def main():
    """Test stationarity of single-cell fiber-density time series.

    For each single cell, averages the left/right quantification windows
    (via ``compute_single_cell_mean``), then runs KPSS and ADF tests on each
    derivative order in DERIVATIVES.  A series counts as stationary when
    KPSS p-value > 0.05 (KPSS null: stationary) or ADF p-value < 0.05
    (ADF null: unit root).  Prints percentages and saves one box plot per test.
    """
    # single-cell experiments only, standard temporal resolution
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=True,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=False,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_time_frames_amount(
        _tuples, compute.density_time_frame(_experiments[0]))
    _tuples = filtering.by_main_cell(_tuples)

    # one window request per cell per side ('left'/'right' of the cell)
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _time_frame = compute.density_time_frame(_experiment)
        for _direction in ['left', 'right']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': 'cell',
                'direction': _direction,
                'time_points': _time_frame
            })
    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments, _keys=['experiment', 'series_id', 'group', 'direction'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute,
                                               _subtract_border=True)

    _tuples = organize.by_single_cell_id(_tuples)
    print('Total experiments:', len(_tuples))

    # p-values collected per derivative order
    _kpss_y_arrays = [[] for _i in DERIVATIVES]
    _adf_y_arrays = [[] for _i in DERIVATIVES]
    for _tuple in tqdm(_tuples, desc='Experiments loop'):
        _experiment, _series_id, _cell_id = _tuple
        _cell_fiber_densities = compute_single_cell_mean(
            _experiment=_experiment,
            _series_id=_series_id,
            _cell_tuples=_tuples[_tuple],
            _windows_dictionary=_windows_dictionary,
            _fiber_densities=_fiber_densities)
        for _derivative_index, _derivative in enumerate(DERIVATIVES):
            _cell_fiber_densities_derivative = compute_lib.derivative(
                _cell_fiber_densities, _n=_derivative)
            # nlags='legacy' keeps the pre-0.12 statsmodels lag selection
            _, _kpss_p_value, _, _ = kpss(_cell_fiber_densities_derivative,
                                          nlags='legacy')
            _kpss_y_arrays[_derivative_index].append(_kpss_p_value)
            _, _adf_p_value, _, _, _, _ = adfuller(
                _cell_fiber_densities_derivative)
            _adf_y_arrays[_derivative_index].append(_adf_p_value)
    print('Total cells:', len(_kpss_y_arrays[0]))

    # print results
    print('KPSS:')
    for _derivative_index, _derivative in enumerate(DERIVATIVES):
        # KPSS: p > 0.05 means we cannot reject stationarity
        _stationary_count = len([
            _value for _value in _kpss_y_arrays[_derivative_index]
            if _value > 0.05
        ])
        print(
            'Derivative:', _derivative, 'Stationary:',
            str(_stationary_count / len(_kpss_y_arrays[_derivative_index]) *
                100) + '%')
    print('ADF:')
    for _derivative_index, _derivative in enumerate(DERIVATIVES):
        # ADF: p < 0.05 means we reject the unit-root null, i.e. stationary
        _stationary_count = len([
            _value for _value in _adf_y_arrays[_derivative_index]
            if _value < 0.05
        ])
        print(
            'Derivative:', _derivative, 'Stationary:',
            str(_stationary_count / len(_adf_y_arrays[_derivative_index]) *
                100) + '%')

    # plot: one box plot per test, with the 0.05 significance line dashed in red
    _colors_array = config.colors(3)
    for _test_name, _y_title, _y_tickvals, _p_value_line, _y_arrays in \
            zip(
                ['kpss', 'adf'],
                ['KPSS test p-value', 'ADF test p-value'],
                [[0.05, 0.1], [0.05, 1]],
                [0.05, 0.05],
                [_kpss_y_arrays, _adf_y_arrays]
            ):
        _fig = go.Figure(data=[
            go.Box(y=_y,
                   name=_derivative,
                   boxpoints='all',
                   jitter=1,
                   pointpos=0,
                   line={'width': 1},
                   fillcolor='white',
                   marker={
                       'size': 10,
                       'color': _color
                   },
                   opacity=0.7,
                   showlegend=False) for _y, _derivative, _color in zip(
                       _y_arrays, DERIVATIVES_TEXT, _colors_array)
        ],
                         layout={
                             'xaxis': {
                                 'title': 'Fiber density derivative',
                                 'zeroline': False
                             },
                             'yaxis': {
                                 'title': _y_title,
                                 'zeroline': False,
                                 'tickmode': 'array',
                                 'tickvals': _y_tickvals
                             },
                             'shapes': [{
                                 'type': 'line',
                                 'x0': DERIVATIVES[0] - 0.75,
                                 'y0': _p_value_line,
                                 'x1': DERIVATIVES[-1] + 0.75,
                                 'y1': _p_value_line,
                                 'line': {
                                     'color': 'red',
                                     'width': 2,
                                     'dash': 'dash'
                                 }
                             }]
                         })
        save.to_html(_fig=_fig,
                     _path=os.path.join(paths.PLOTS, save.get_module_name()),
                     _filename='plot_' + _test_name)
def main(_early_time_frames=True):
    """Heatmap of z-scored fiber density vs. its change over time.

    Collects per-cell fiber-density z-scores (and their DERIVATIVE-order
    changes) over either the early or the late time-frame window, prints the
    correlation between density and change, and — when PLOT is set — saves a
    2-D density heatmap binned by X_BINS/Y_BINS.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(_experiments=_experiments,
                                           _is_single_cell=False,
                                           _is_high_temporal_resolution=False,
                                           _is_bleb=False,
                                           _is_dead_dead=False,
                                           _is_live_dead=False,
                                           _is_bead=False,
                                           _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    print('Total tuples:', len(_tuples))

    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside'
            })
    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)

    _heatmap_fiber = []
    _heatmap_fiber_change = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _series_normalization = load.normalization_series_file_data(
            _experiment, _series_id)
        for _cell_id in ['left_cell', 'right_cell']:
            _fiber_densities_by_time = [
                _fiber_densities[_tuple]
                for _tuple in _windows_dictionary[(_experiment, _series_id,
                                                   _group, _cell_id)]
            ]
            # slice the early or late time-frame window for this offset
            _cell_fiber_densities = \
                _fiber_densities_by_time[TIME_FRAMES[OFFSET_Y]['early'][0] if _early_time_frames else TIME_FRAMES[OFFSET_Y]['late'][0]:
                                         TIME_FRAMES[OFFSET_Y]['early'][1] if _early_time_frames else TIME_FRAMES[OFFSET_Y]['late'][1]]
            _properties = load.group_properties(_experiment, _series_id,
                                                _group)
            _cell_fiber_densities = compute.remove_blacklist(
                _experiment, _series_id,
                _properties['cells_ids'][_cell_id], _cell_fiber_densities)
            _cell_fiber_densities = compute.longest_fiber_densities_ascending_sequence(
                _cell_fiber_densities)
            # fix if found nan: truncate at the first NaN
            if True in np.isnan(_cell_fiber_densities):
                _cell_fiber_densities = _cell_fiber_densities[:np.where(
                    np.isnan(_cell_fiber_densities))[0][0]]
            # not enough data
            if len(_cell_fiber_densities) < DERIVATIVE + 1:
                continue
            _z_score_fiber_density = libs.compute_lib.z_score_array(
                _array=_cell_fiber_densities,
                _average=_series_normalization['average'],
                _std=_series_normalization['std'])
            # SN41/SN44 are sampled 3x denser; subsample every 3rd frame at
            # each of the 3 phase offsets so step sizes match other experiments
            # (presumably — confirm against acquisition metadata)
            if _experiment in ['SN41', 'SN44']:
                for _start_index in [0, 1, 2]:
                    _heatmap_fiber += _z_score_fiber_density[_start_index::3][
                        DERIVATIVE:]
                    _heatmap_fiber_change += compute_lib.derivative(
                        _z_score_fiber_density[_start_index::3], _n=DERIVATIVE)
            else:
                _heatmap_fiber += _z_score_fiber_density[DERIVATIVE:]
                _heatmap_fiber_change += compute_lib.derivative(
                    _z_score_fiber_density, _n=DERIVATIVE)

    print(
        compute_lib.correlation(_heatmap_fiber,
                                _heatmap_fiber_change,
                                _with_p_value=True))

    if PLOT:
        # bin (density, change) points into a 2-D histogram normalized to fractions
        _y_shape = int(round((Y_LABELS_END - Y_LABELS_START) * Y_BINS))
        _x_shape = int(round((X_LABELS_END - X_LABELS_START) * X_BINS))
        _total_points = 0
        _z_array = np.zeros(shape=(_y_shape, _x_shape))
        for _y, _x in zip(_heatmap_fiber_change, _heatmap_fiber):
            _y_rounded, _x_rounded = int(round(_y * Y_BINS)), int(
                round(_x * X_BINS))
            _y_index, _x_index = int(_y_rounded -
                                     Y_LABELS_START * Y_BINS), int(
                                         _x_rounded - X_LABELS_START * X_BINS)
            if 0 <= _y_index < _z_array.shape[
                    0] and 0 <= _x_index < _z_array.shape[1]:
                _z_array[_y_index][_x_index] += 1
                _total_points += 1
        _z_array = _z_array / _total_points
        if not CONDITIONAL_NORMALIZATION:
            # None becomes NaN in the float array, hiding empty bins in the plot
            _z_array[_z_array == 0] = None
        else:
            # normalize each fiber-density row to sum to 1
            # NOTE(review): _z_array_plot is computed here but the heatmap
            # below plots _z_array — looks like _z_array_plot was intended
            # when CONDITIONAL_NORMALIZATION is on; confirm before changing.
            _z_array_plot = np.zeros(shape=np.array(_z_array).shape)
            for _fiber_index, _fiber_density_z_score in enumerate(_z_array):
                _sum = np.sum(_fiber_density_z_score)
                for _change_index, _change_z_score in enumerate(
                        _fiber_density_z_score):
                    _z_array_plot[_fiber_index][_change_index] = (
                        _change_z_score / _sum) if _sum != 0 else 0
            _z_array_plot[_z_array_plot == 0] = None
        _fig = go.Figure(
            data=go.Heatmap(x=np.arange(start=X_LABELS_START,
                                        stop=X_LABELS_END,
                                        step=1 / X_BINS),
                            y=np.arange(start=Y_LABELS_START,
                                        stop=Y_LABELS_END,
                                        step=1 / Y_BINS),
                            z=_z_array,
                            colorscale='Viridis',
                            colorbar={
                                'tickmode': 'array',
                                'tickvals': [0, 0.025, 0.05],
                                'ticktext': ['0', 'Fraction', '0.05'],
                                'tickangle': -90
                            },
                            zmin=Z_MIN,
                            zmax=Z_MAX[CONDITIONAL_NORMALIZATION]),
            layout={
                'xaxis': {
                    'title': 'Fiber densities z-score',
                    'zeroline': False
                },
                'yaxis': {
                    'title': 'Change in fiber<br>density (z-score)',
                    'zeroline': False
                },
                'shapes': [{
                    'type': 'line',
                    'x0': X_LABELS_START,
                    'y0': Y_LABELS_START,
                    'x1': X_LABELS_END,
                    'y1': Y_LABELS_START,
                    'line': {
                        'color': 'black',
                        'width': 2
                    }
                }, {
                    'type': 'line',
                    'x0': X_LABELS_START,
                    'y0': Y_LABELS_START,
                    'x1': X_LABELS_START,
                    'y1': Y_LABELS_END,
                    'line': {
                        'color': 'black',
                        'width': 2
                    }
                }]
            })
        save.to_html(_fig=_fig,
                     _path=os.path.join(paths.PLOTS, save.get_module_name()),
                     _filename='plot_early_' + str(_early_time_frames))
def main(_high_temporal_resolution=True, _offset_y=0.5):
    """Compare inner vs. outer fiber-density correlations of cell pairs.

    For each pair, correlates the left/right cells' fiber-density derivatives
    for windows placed 'inside' (between the cells) and 'outside' (away from
    the pair), prints Pearson/Wilcoxon statistics on the two correlation sets,
    and saves an inner-vs-outer scatter plot with a y=x reference line.
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_real_pairs(_tuples)
    # _tuples = filtering.by_band(_tuples)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    print('Total tuples:', len(_tuples))

    # one window request per cell per direction (inside / outside)
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _cell_id, _direction in product(['left_cell', 'right_cell'],
                                            ['inside', 'outside']):
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': _offset_y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': _direction,
                'time_points': _latest_time_frame
            })
    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments, _keys=['experiment', 'series_id', 'group', 'cell_id', 'direction'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute,
                                               _subtract_border=True)
    _experiments_fiber_densities = {
        _key:
        [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _inner_correlations = []
    _outer_correlations = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _left_inner_tuple = (_experiment, _series_id, _group, 'left_cell',
                             'inside')
        _left_outer_tuple = (_experiment, _series_id, _group, 'left_cell',
                             'outside')
        _right_inner_tuple = (_experiment, _series_id, _group, 'right_cell',
                              'inside')
        _right_outer_tuple = (_experiment, _series_id, _group, 'right_cell',
                              'outside')
        # all four windows must exist for a valid pair
        if _left_inner_tuple not in _windows_dictionary or _left_outer_tuple not in _windows_dictionary or \
                _right_inner_tuple not in _windows_dictionary or _right_outer_tuple not in _windows_dictionary:
            continue

        _properties = load.group_properties(_experiment, _series_id, _group)

        _left_inner_fiber_densities = _experiments_fiber_densities[
            _left_inner_tuple]
        _left_outer_fiber_densities = _experiments_fiber_densities[
            _left_outer_tuple]
        _right_inner_fiber_densities = _experiments_fiber_densities[
            _right_inner_tuple]
        _right_outer_fiber_densities = _experiments_fiber_densities[
            _right_outer_tuple]

        _left_inner_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['left_cell'],
            _left_inner_fiber_densities)
        _left_outer_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['left_cell'],
            _left_outer_fiber_densities)
        _right_inner_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['right_cell'],
            _right_inner_fiber_densities)
        _right_outer_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['right_cell'],
            _right_outer_fiber_densities)

        # align left/right series onto shared valid time frames
        _left_inner_fiber_densities, _right_inner_fiber_densities = \
            compute.longest_same_indices_shared_in_borders_sub_array(
                _left_inner_fiber_densities, _right_inner_fiber_densities)
        _left_outer_fiber_densities, _right_outer_fiber_densities = \
            compute.longest_same_indices_shared_in_borders_sub_array(
                _left_outer_fiber_densities, _right_outer_fiber_densities)

        # ignore small arrays
        _minimum_time_frame_for_correlation = compute.minimum_time_frames_for_correlation(
            _experiment)
        if len(_left_inner_fiber_densities) < _minimum_time_frame_for_correlation or \
                len(_left_outer_fiber_densities) < _minimum_time_frame_for_correlation:
            continue

        _inner_correlations.append(
            compute_lib.correlation(
                compute_lib.derivative(_left_inner_fiber_densities,
                                       _n=DERIVATIVE),
                compute_lib.derivative(_right_inner_fiber_densities,
                                       _n=DERIVATIVE)))
        _outer_correlations.append(
            compute_lib.correlation(
                compute_lib.derivative(_left_outer_fiber_densities,
                                       _n=DERIVATIVE),
                compute_lib.derivative(_right_outer_fiber_densities,
                                       _n=DERIVATIVE)))

    print('Total pairs:', len(_inner_correlations))
    print(
        'Pearson correlation:',
        compute_lib.correlation(_inner_correlations,
                                _outer_correlations,
                                _with_p_value=True))
    print('Wilcoxon of inner around the zero:', wilcoxon(_inner_correlations))
    print('Wilcoxon of outer around the zero:', wilcoxon(_outer_correlations))
    _inner_minus_outer = np.array(_inner_correlations) - np.array(
        _outer_correlations)
    print('Wilcoxon of inner minus outer:', wilcoxon(_inner_minus_outer))
    print('Higher inner amount:',
          (_inner_minus_outer > 0).sum() / len(_inner_minus_outer))

    # plot: inner vs outer scatter; black lines frame the axes, red line is y=x
    _fig = go.Figure(data=go.Scatter(x=_inner_correlations,
                                     y=_outer_correlations,
                                     mode='markers',
                                     marker={
                                         'size': 5,
                                         'color': '#ea8500'
                                     },
                                     showlegend=False),
                     layout={
                         'xaxis': {
                             'title': 'Inner correlation',
                             'zeroline': False,
                             'range': [-1.1, 1.2],
                             'tickmode': 'array',
                             'tickvals': [-1, -0.5, 0, 0.5, 1]
                         },
                         'yaxis': {
                             'title': 'Outer correlation',
                             'zeroline': False,
                             'range': [-1.1, 1.2],
                             'tickmode': 'array',
                             'tickvals': [-1, -0.5, 0, 0.5, 1]
                         },
                         'shapes': [{
                             'type': 'line',
                             'x0': -1,
                             'y0': -1,
                             'x1': -1,
                             'y1': 1,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }, {
                             'type': 'line',
                             'x0': -1,
                             'y0': -1,
                             'x1': 1,
                             'y1': -1,
                             'line': {
                                 'color': 'black',
                                 'width': 2
                             }
                         }, {
                             'type': 'line',
                             'x0': -1,
                             'y0': -1,
                             'x1': 1,
                             'y1': 1,
                             'line': {
                                 'color': 'red',
                                 'width': 2
                             }
                         }]
                     })
    save.to_html(_fig=_fig,
                 _path=os.path.join(paths.PLOTS, save.get_module_name()),
                 _filename='plot_high_' + str(_high_temporal_resolution) +
                 '_offset_y_' + str(_offset_y))
def compute_z_array(_band=True,
                    _high_temporal_resolution=False,
                    _offset_x=OFFSET_X,
                    _offset_y_start=OFFSET_Y_START,
                    _offset_y_end=OFFSET_Y_END,
                    _offset_y_step=OFFSET_Y_STEP,
                    _offset_z_start=OFFSET_Z_START,
                    _offset_z_end=OFFSET_Z_END,
                    _offset_z_step=OFFSET_Z_STEP):
    """Compute a (offset_y x offset_z) heatmap of mean values via multiprocessing.

    Builds fiber-density windows for every (offset_y, offset_z) combination over
    all qualifying pairs, then fans out ``compute_data`` over a process pool —
    one task per offset combination — collecting each returned mean into the
    result grid.  Publishes ``_tuples`` and ``_experiments_fiber_densities`` as
    module globals so worker processes (forked) can read them.

    Returns the filled numpy array of shape (len(offsets_y), len(offsets_z)).

    NOTE(review): the ``_offset_x`` parameter is never used — the body reads the
    module constant OFFSET_X instead; confirm whether callers rely on this.
    """
    # globals shared with the Pool workers via compute_data
    global _tuples, _experiments_fiber_densities, _z_array
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_time_frames_amount(
        _tuples, compute.minimum_time_frames_for_correlation(_experiments[0]))
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples, _band=_band)
    print('Total tuples:', len(_tuples))

    # inclusive offset grids (stop is end + step so the end value is included)
    _offsets_y = np.arange(start=_offset_y_start,
                           stop=_offset_y_end + _offset_y_step,
                           step=_offset_y_step)
    _offsets_z = np.arange(start=_offset_z_start,
                           stop=_offset_z_end + _offset_z_step,
                           step=_offset_z_step)

    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _offset_y, _offset_z, _cell_id in product(
                _offsets_y, _offsets_z, ['left_cell', 'right_cell']):
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': _offset_y,
                'offset_z': _offset_z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })
    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments, _keys=['experiment', 'series_id', 'group',
                                           'offset_y', 'offset_z', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)
    _experiments_fiber_densities = {
        _key:
        [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    # clean: release the large intermediates before forking worker processes
    _fiber_densities = None
    _windows_dictionary = None
    _windows_to_compute = None

    # one task per (offset_y, offset_z) grid cell
    _arguments = []
    for (_offset_y_index, _offset_y), (_offset_z_index, _offset_z) in \
            product(enumerate(_offsets_y), enumerate(_offsets_z)):
        _arguments.append(
            (_offset_y_index, _offset_y, _offset_z_index, _offset_z))

    _z_array = np.zeros(shape=(len(_offsets_y), len(_offsets_z)))
    with Pool(CPUS_TO_USE) as _p:
        for _answer in tqdm(_p.imap_unordered(compute_data, _arguments),
                            total=len(_arguments),
                            desc='Computing heatmap'):
            _offset_y_index, _offset_z_index, _mean = _answer
            _z_array[_offset_y_index, _offset_z_index] = _mean
        _p.close()
        _p.join()

    return _z_array
def compute_fiber_densities(_offset_y=0.5, _high_temporal_resolution=False):
    """Same-vs-different network correlations for matched real and fake pairs.

    For every matched (real, fake) pair of groups, computes the 'same'
    correlation (left vs right cell of the same group) and the 'different'
    correlations (each cell against cells of every other matched group, all
    four left/right combinations).  Invalid comparisons are recorded as None
    placeholders so the real and fake correlation lists stay index-aligned.

    Returns ``[real_distances, fake_distances]`` — per valid comparison, the
    signed distance of the (same, different) point from the y = x line
    (positive when same > different).
    """
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_band(_tuples)
    _tuples = filtering.by_real_fake_pairs(_tuples, _real_fake_pairs=False)
    _experiments_matched = organize.by_matched_real_and_fake(_tuples)
    print('Total matched pairs:', len(_experiments_matched))

    _arguments = []
    for _matched_tuple in _experiments_matched:
        for _tuple in _matched_tuple:
            _experiment, _series_id, _group = _tuple
            _latest_time_frame = compute.latest_time_frame_before_overlapping(
                _experiment, _series_id, _group, OFFSET_X)
            for _cell_id in ['left_cell', 'right_cell']:
                _arguments.append({
                    'experiment': _experiment,
                    'series_id': _series_id,
                    'group': _group,
                    'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                    'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                    'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                    'offset_x': OFFSET_X,
                    'offset_y': _offset_y,
                    'offset_z': OFFSET_Z,
                    'cell_id': _cell_id,
                    'direction': 'inside',
                    'time_points': _latest_time_frame
                })
    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute,
                                               _subtract_border=True)
    _experiments_fiber_densities = {
        _key:
        [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _tuples_by_experiment = organize.by_experiment(_tuples)

    # same (real, fake), different (real, fake)
    _correlations = [[[], []], [[], []]]
    _valid_real_tuples = []
    for _experiment in _tuples_by_experiment:
        print('Experiment:', _experiment)
        _experiment_tuples = _tuples_by_experiment[_experiment]
        _experiments_matched = organize.by_matched_real_and_fake(
            _experiment_tuples)
        print('Matched pairs:', len(_experiments_matched))
        for _same_index in tqdm(range(len(_experiments_matched)),
                                desc='Main loop'):
            # _group_type_index: 0 = real group, 1 = fake group
            for _group_type_index in [0, 1]:
                _same_tuple = _experiments_matched[_same_index][
                    _group_type_index]
                _same_experiment, _same_series, _same_group = _same_tuple

                _same_left_cell_fiber_densities = \
                    _experiments_fiber_densities[
                        (_same_experiment, _same_series, _same_group, 'left_cell')
                    ]
                _same_right_cell_fiber_densities = \
                    _experiments_fiber_densities[
                        (_same_experiment, _same_series, _same_group, 'right_cell')
                    ]

                _same_properties = \
                    load.group_properties(_same_experiment, _same_series, _same_group)
                _same_left_cell_fiber_densities = compute.remove_blacklist(
                    _same_experiment, _same_series,
                    _same_properties['cells_ids']['left_cell'],
                    _same_left_cell_fiber_densities)
                _same_right_cell_fiber_densities = compute.remove_blacklist(
                    _same_experiment, _same_series,
                    _same_properties['cells_ids']['right_cell'],
                    _same_right_cell_fiber_densities)

                _same_left_cell_fiber_densities_filtered, _same_right_cell_fiber_densities_filtered = \
                    compute.longest_same_indices_shared_in_borders_sub_array(
                        _same_left_cell_fiber_densities, _same_right_cell_fiber_densities
                    )

                # ignore small arrays: pad with None placeholders (4 per
                # skipped "different" group — one per cell-id combination)
                # to keep the real/fake lists index-aligned, then skip
                if len(_same_left_cell_fiber_densities_filtered
                       ) < compute.minimum_time_frames_for_correlation(
                           _same_experiment):
                    for _different_index in range(len(_experiments_matched)):
                        if _same_index != _different_index:
                            # for all combinations
                            for _i in range(4):
                                _correlations[0][_group_type_index].append(
                                    None)
                                _correlations[1][_group_type_index].append(
                                    None)
                    continue

                _same_correlation = compute_lib.correlation(
                    compute_lib.derivative(
                        _same_left_cell_fiber_densities_filtered,
                        _n=DERIVATIVE),
                    compute_lib.derivative(
                        _same_right_cell_fiber_densities_filtered,
                        _n=DERIVATIVE))

                # pair this group's cells against every other matched group
                for _different_index in range(len(_experiments_matched)):
                    if _same_index != _different_index:
                        _different_tuple = _experiments_matched[
                            _different_index][_group_type_index]
                        _different_experiment, _different_series, _different_group = _different_tuple
                        for _same_cell_id, _different_cell_id in product(
                                ['left_cell', 'right_cell'],
                                ['left_cell', 'right_cell']):
                            _same_fiber_densities = _experiments_fiber_densities[
                                (_same_experiment, _same_series, _same_group,
                                 _same_cell_id)]
                            _different_fiber_densities = _experiments_fiber_densities[
                                (_different_experiment, _different_series,
                                 _different_group, _different_cell_id)]

                            _different_properties = load.group_properties(
                                _different_experiment, _different_series,
                                _different_group)
                            _same_fiber_densities = compute.remove_blacklist(
                                _same_experiment, _same_series,
                                _same_properties['cells_ids'][_same_cell_id],
                                _same_fiber_densities)
                            _different_fiber_densities = compute.remove_blacklist(
                                _different_experiment, _different_series,
                                _different_properties['cells_ids']
                                [_different_cell_id],
                                _different_fiber_densities)

                            _same_fiber_densities_filtered, _different_fiber_densities_filtered = \
                                compute.longest_same_indices_shared_in_borders_sub_array(
                                    _same_fiber_densities, _different_fiber_densities
                                )

                            # ignore small arrays (None keeps alignment)
                            if len(
                                    _same_fiber_densities_filtered
                            ) < compute.minimum_time_frames_for_correlation(
                                    _different_experiment):
                                _correlations[0][_group_type_index].append(
                                    None)
                                _correlations[1][_group_type_index].append(
                                    None)
                                continue

                            _different_correlation = compute_lib.correlation(
                                compute_lib.derivative(
                                    _same_fiber_densities_filtered,
                                    _n=DERIVATIVE),
                                compute_lib.derivative(
                                    _different_fiber_densities_filtered,
                                    _n=DERIVATIVE))
                            _correlations[0][_group_type_index].append(
                                _same_correlation)
                            _correlations[1][_group_type_index].append(
                                _different_correlation)

                if _group_type_index == 0 and _same_tuple not in _valid_real_tuples:
                    _valid_real_tuples.append(_same_tuple)

    print('Total real tuples:', len(_valid_real_tuples))

    # signed distances from y = x, only where all four aligned values are valid
    _distances_from_y_equal_x = [[], []]
    _same_correlations, _different_correlations = _correlations
    _same_real_correlations, _same_fake_correlations = _same_correlations
    _different_real_correlations, _different_fake_correlations = _different_correlations
    for _same_real, _same_fake, _different_real, _different_fake in \
            zip(_same_real_correlations,
                _same_fake_correlations,
                _different_real_correlations,
                _different_fake_correlations):

        # one of the correlations is none - not valid
        if None in [_same_real, _same_fake, _different_real, _different_fake]:
            continue

        for _group_type_index, _same, _different in \
                zip([0, 1],
                    [_same_real, _same_fake],
                    [_different_real, _different_fake]):
            _point_distance = compute_lib.distance_from_a_point_to_a_line(
                _line=[-1, -1, 1, 1], _point=[_same, _different])
            # positive distance when the same-network correlation wins
            if _same > _different:
                _distances_from_y_equal_x[_group_type_index].append(
                    _point_distance)
            else:
                _distances_from_y_equal_x[_group_type_index].append(
                    -_point_distance)

    return _distances_from_y_equal_x
def main(_real_cells=True, _static=False, _band=True, _high_temporal_resolution=False):
    """Plot a heatmap of a correlation measure as a function of window border
    padding and of the space between the quantification window and its border.

    :param _real_cells: passed to ``filtering.by_real_pairs`` as ``_real_pairs``.
    :param _static: passed to ``filtering.by_fake_static_pairs`` as ``_fake_static_pairs``.
    :param _band: passed to ``filtering.by_band``.
    :param _high_temporal_resolution: selects the temporal-resolution category
        of experiments; also embedded in the output file name.
    """
    # collect cell-pair experiments of the requested temporal resolution;
    # all other special categories (bleb, dead, bead, metastasis...) excluded
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False
    )

    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples, _real_pairs=_real_cells)
    _tuples = filtering.by_fake_static_pairs(_tuples, _fake_static_pairs=_static)
    _tuples = filtering.by_band(_tuples, _band=_band)
    print('Total tuples:', len(_tuples))

    # one quantification-window argument dict per cell of each pair, covering
    # time frames up to the last one before the two windows overlap
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(_experiment, _series_id, _group, OFFSET_X)
        for _cell_id in ['left_cell', 'right_cell']:
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': OFFSET_Y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })

    # one correlation per (padding, space) combination; padding and space are
    # applied identically on the y and z axes
    _z_array = np.zeros(shape=(len(BY), len(BY)))
    for (_padding_index, _padding_by), (_space_index, _space_by) in product(enumerate(BY), enumerate(BY)):
        print('Padding by: ', _padding_by, ', space by: ', _space_by)
        _correlation = compute_data(_tuples, _arguments, _padding_y_by=_padding_by, _padding_z_by=_padding_by, _space_y_by=_space_by, _space_z_by=_space_by)
        # heatmap rows (y) are the spacing, columns (x) are the padding
        _z_array[_space_index, _padding_index] = _correlation

    # plot
    _colors_array = ['white', config.colors(1)]
    _fig = go.Figure(
        data=go.Heatmap(
            x=BY,
            y=BY,
            z=_z_array,
            colorscale=sns.color_palette(_colors_array).as_hex(),
            colorbar={
                'tickmode': 'array',
                'tickvals': [0, 0.5, 1],
                'ticktext': ['0', 'Correlation', '1'],
                'tickangle': -90
            },
            showscale=True,
            zmin=0,
            zmax=1
        ),
        layout={
            'xaxis': {
                'title': 'Border size (cell diameter)',
                'zeroline': False,
                'tickmode': 'array',
                'tickvals': [0, 0.5, 1, 1.5, 2]
            },
            'yaxis': {
                'title': 'Space from quantification window (cell diameter)',
                'zeroline': False,
                'tickmode': 'array',
                'tickvals': [0, 0.5, 1, 1.5, 2]
            }
        }
    )

    save.to_html(
        _fig=_fig,
        _path=os.path.join(paths.PLOTS, save.get_module_name()),
        _filename='plot_high_time_' + str(_high_temporal_resolution) + '_band_' + str(_band)
    )
def main():
    """Compare, per triplet of pairs, the correlation of fiber-density
    derivatives within the same pair ("same network") against the correlation
    across different pairs of the triplet ("different network"), then plot one
    scatter trace per triplet and run a Wilcoxon test on the differences.
    """
    # collect regular (non high-temporal-resolution) cell-pair experiments
    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=False,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False
    )

    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    _tuples = filtering.by_time_frames_amount(_tuples, MINIMUM_TIME_FRAMES)
    # no upper bound on the pair distance, only a minimum
    _tuples = filtering.by_pair_distance_range(_tuples, [MINIMUM_PAIR_DISTANCE, sys.maxsize])
    _triplets = filtering.by_triplets(_tuples)
    print('Total triplets:', len(_triplets))

    # one quantification-window argument dict per cell of every pair in every
    # triplet, up to the last time frame before the windows overlap
    _arguments = []
    for _triplet in _triplets:
        for _tuple in _triplet:
            _experiment, _series_id, _group = _tuple
            _latest_time_frame = compute.latest_time_frame_before_overlapping(_experiment, _series_id, _group, OFFSET_X)
            for _cell_id in ['left_cell', 'right_cell']:
                _arguments.append({
                    'experiment': _experiment,
                    'series_id': _series_id,
                    'group': _group,
                    'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                    'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                    'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                    'offset_x': OFFSET_X,
                    'offset_y': OFFSET_Y,
                    'offset_z': OFFSET_Z,
                    'cell_id': _cell_id,
                    'direction': 'inside',
                    'time_points': _latest_time_frame
                })

    _windows_dictionary, _windows_to_compute = compute.windows(_arguments, _keys=['experiment', 'series_id', 'group', 'cell_id'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute, _subtract_border=True)

    # map (experiment, series, group, cell_id) -> fiber-density time series
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    # one correlations bucket per triplet
    _same_correlations_arrays = [[] for _i in _triplets]
    _different_correlations_arrays = [[] for _i in _triplets]
    _names_array = []
    for _triplet_index, _triplet in enumerate(_triplets):
        for _same_index in tqdm(range(len(_triplet)), desc='Main loop'):
            _same_tuple = _triplet[_same_index]
            _same_experiment, _same_series, _same_group = _same_tuple

            _same_left_cell_fiber_densities = \
                _experiments_fiber_densities[
                    (_same_experiment, _same_series, _same_group, 'left_cell')
                ]
            _same_right_cell_fiber_densities = \
                _experiments_fiber_densities[
                    (_same_experiment, _same_series, _same_group, 'right_cell')
                ]

            # drop blacklisted time frames before correlating
            _same_properties = \
                load.group_properties(_same_experiment, _same_series, _same_group)
            _same_left_cell_fiber_densities = compute.remove_blacklist(
                _same_experiment,
                _same_series,
                _same_properties['cells_ids']['left_cell'],
                _same_left_cell_fiber_densities
            )
            _same_right_cell_fiber_densities = compute.remove_blacklist(
                _same_experiment,
                _same_series,
                _same_properties['cells_ids']['right_cell'],
                _same_right_cell_fiber_densities
            )

            # keep only indices shared by both cells (in-border frames)
            _same_left_cell_fiber_densities_filtered, _same_right_cell_fiber_densities_filtered = \
                compute.longest_same_indices_shared_in_borders_sub_array(
                    _same_left_cell_fiber_densities, _same_right_cell_fiber_densities
                )

            # "same network" correlation: left vs right cell of the same pair
            _same_correlation = compute_lib.correlation(
                compute_lib.derivative(_same_left_cell_fiber_densities_filtered, _n=DERIVATIVE),
                compute_lib.derivative(_same_right_cell_fiber_densities_filtered, _n=DERIVATIVE)
            )
            for _different_index in range(len(_triplet)):
                if _same_index != _different_index:
                    _different_tuple = _triplet[_different_index]
                    _different_experiment, _different_series, _different_group = \
                        _different_tuple
                    # all 4 cell combinations across the two pairs
                    for _same_cell_id, _different_cell_id in product(['left_cell', 'right_cell'], ['left_cell', 'right_cell']):
                        _same_fiber_densities = _experiments_fiber_densities[(
                            _same_experiment, _same_series, _same_group, _same_cell_id
                        )]
                        _different_fiber_densities = _experiments_fiber_densities[(
                            _different_experiment, _different_series, _different_group, _different_cell_id
                        )]

                        _different_properties = load.group_properties(
                            _different_experiment, _different_series, _different_group
                        )
                        _same_fiber_densities = compute.remove_blacklist(
                            _same_experiment,
                            _same_series,
                            _same_properties['cells_ids'][_same_cell_id],
                            _same_fiber_densities
                        )
                        _different_fiber_densities = compute.remove_blacklist(
                            _different_experiment,
                            _different_series,
                            _different_properties['cells_ids'][_different_cell_id],
                            _different_fiber_densities
                        )

                        _same_fiber_densities_filtered, _different_fiber_densities_filtered = \
                            compute.longest_same_indices_shared_in_borders_sub_array(
                                _same_fiber_densities, _different_fiber_densities
                            )

                        # "different network" correlation: cells of different pairs
                        _different_correlation = compute_lib.correlation(
                            compute_lib.derivative(_same_fiber_densities_filtered, _n=DERIVATIVE),
                            compute_lib.derivative(_different_fiber_densities_filtered, _n=DERIVATIVE)
                        )
                        _same_correlations_arrays[_triplet_index].append(_same_correlation)
                        _different_correlations_arrays[_triplet_index].append(_different_correlation)

        # one legend entry per triplet (matches the per-triplet colors below)
        _names_array.append('Triplet #' + str(_triplet_index + 1))

    print('Total points:', len(np.array(_same_correlations_arrays).flatten()))
    _same_minus_different = \
        np.array(_same_correlations_arrays).flatten() - np.array(_different_correlations_arrays).flatten()
    print('Wilcoxon of same minus different around the zero:')
    print(wilcoxon(_same_minus_different))
    print('Higher same amount:', (_same_minus_different > 0).sum() / len(_same_minus_different))

    # plot
    _colors_array = ['green', 'blue', config.colors(1)]
    _fig = go.Figure(
        data=[
            go.Scatter(
                x=_same_correlations_array,
                y=_different_correlations_array,
                name=_name,
                mode='markers',
                marker={
                    'size': 15,
                    'color': _color
                },
                opacity=0.7
            ) for _same_correlations_array, _different_correlations_array, _name, _color in
            zip(_same_correlations_arrays, _different_correlations_arrays, _names_array, _colors_array)
        ],
        layout={
            'xaxis': {
                'title': 'Same network correlation',
                'zeroline': False,
                'range': [-1.1, 1.2],
                'tickmode': 'array',
                'tickvals': [-1, -0.5, 0, 0.5, 1]
            },
            'yaxis': {
                'title': 'Different network correlation',
                'zeroline': False,
                'range': [-1.1, 1.2],
                'tickmode': 'array',
                'tickvals': [-1, -0.5, 0, 0.5, 1]
            },
            'legend': {
                'xanchor': 'left',
                'x': 0.1,
                'yanchor': 'top',
                'bordercolor': 'black',
                'borderwidth': 2,
                'bgcolor': 'white'
            },
            # axis frame (two black lines) plus the red y = x reference line
            'shapes': [
                {
                    'type': 'line',
                    'x0': -1,
                    'y0': -1,
                    'x1': -1,
                    'y1': 1,
                    'line': {
                        'color': 'black',
                        'width': 2
                    }
                },
                {
                    'type': 'line',
                    'x0': -1,
                    'y0': -1,
                    'x1': 1,
                    'y1': -1,
                    'line': {
                        'color': 'black',
                        'width': 2
                    }
                },
                {
                    'type': 'line',
                    'x0': -1,
                    'y0': -1,
                    'x1': 1,
                    'y1': 1,
                    'line': {
                        'color': 'red',
                        'width': 2
                    }
                }
            ]
        }
    )

    save.to_html(
        _fig=_fig,
        _path=os.path.join(paths.PLOTS, save.get_module_name()),
        _filename='plot'
    )
def main():
    """Export per-time-frame fiber densities of cell pairs to a CSV file.

    For every filtered pair and every y-offset in ``OFFSETS_Y``, writes one row
    per time frame with both cells' fiber density, its z-score (normalized by
    the series' normalization data) and out-of-boundaries flags.

    Output: ``<paths.OUTPUTS>/experiments_density_cell_pairs.csv``.
    """
    # collect cell-pair experiments of both temporal resolutions
    # (_is_high_temporal_resolution=None means "don't filter on it")
    _experiments = config.all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=None,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False
    )

    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, PAIR_DISTANCE_RANGE)
    _tuples = filtering.by_real_pairs(_tuples, _real_pairs=REAL)
    _tuples = filtering.by_fake_static_pairs(_tuples, _fake_static_pairs=STATIC)
    _tuples = filtering.by_real_fake_pairs(_tuples, _real_fake_pairs=REAL_FAKE)
    print('Total tuples:', len(_tuples))

    # one quantification window per cell and per y-offset, with no x/z offset,
    # up to the last time frame before the two windows overlap
    _arguments = []
    for _tuple in tqdm(_tuples, desc='Setting windows to compute'):
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, _offset_x=0)
        for _cell_id in ['left_cell', 'right_cell']:
            for _offset_y in OFFSETS_Y:
                _arguments.append({
                    'experiment': _experiment,
                    'series_id': _series_id,
                    'group': _group,
                    'length_x': config.QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                    'length_y': config.QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                    'length_z': config.QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                    'offset_x': 0,
                    'offset_y': _offset_y,
                    'offset_z': 0,
                    'cell_id': _cell_id,
                    'direction': 'inside',
                    'time_points': _latest_time_frame
                })

    _windows_dictionary, _windows_to_compute = \
        compute.windows(_arguments, _keys=['experiment', 'series_id', 'group', 'cell_id', 'offset_y'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)

    # map (experiment, series, group, cell_id, offset_y) -> time series
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _headers = [
        'time_frame',
        'experiment',
        'series_id',
        'group',
        'left_cell_id',
        'right_cell_id',
        'band',
        'fake_following',
        'fake_static',
        'pair_distance_in_cell_diameter',
        # BUGFIX: this column was labeled 'offset_z', but the value written in
        # each row is _offset_y (the windows vary offset_y over OFFSETS_Y while
        # offset_z is fixed at 0 above)
        'offset_y',
        'left_cell_fiber_density',
        'left_cell_fiber_density_z_score',
        'left_cell_out_of_boundaries',
        'right_cell_fiber_density',
        'right_cell_fiber_density_z_score',
        'right_cell_out_of_boundaries'
    ]

    _csv_path = os.path.join(paths.OUTPUTS, 'experiments_density_cell_pairs.csv')
    with open(_csv_path, 'w', newline='') as _csv_file:
        _csv_writer = csv.writer(_csv_file)
        _csv_writer.writerow(_headers)
        for _tuple in tqdm(_tuples, desc='Main loop'):
            _experiment, _series_id, _group = _tuple
            _group_properties = load.group_properties(_experiment, _series_id, _group)
            # NOTE(review): relies on dict insertion order of 'cells_ids' being
            # (left, right) and of the normalization file being (average, std) —
            # confirm against the loaders
            _left_cell_id, _right_cell_id = _group_properties[
                'cells_ids'].values()
            _band = _group_properties['band']
            _fake_following, _fake_static = _group_properties[
                'fake'], _group_properties['static']
            _average, _std = load.normalization_series_file_data(
                _experiment, _series_id).values()
            for _offset_y in OFFSETS_Y:
                _left_cell_fiber_densities = \
                    _experiments_fiber_densities[(_experiment, _series_id, _group, 'left_cell', _offset_y)]
                _right_cell_fiber_densities = \
                    _experiments_fiber_densities[(_experiment, _series_id, _group, 'right_cell', _offset_y)]

                # drop blacklisted time frames before writing
                _left_cell_fiber_densities = compute.remove_blacklist(
                    _experiment, _series_id, _left_cell_id, _left_cell_fiber_densities)
                _right_cell_fiber_densities = compute.remove_blacklist(
                    _experiment, _series_id, _right_cell_id, _right_cell_fiber_densities)

                for _time_frame, (_left_cell_fiber_density, _right_cell_fiber_density) in \
                        enumerate(zip(_left_cell_fiber_densities, _right_cell_fiber_densities)):
                    _pair_distance = compute.pair_distance_in_cell_size_time_frame(
                        _experiment, _series_id, _group, _time_frame)

                    # each entry is a (density, out_of_boundaries) pair
                    _left_cell_fiber_density, _left_cell_out_of_boundaries = _left_cell_fiber_density
                    _right_cell_fiber_density, _right_cell_out_of_boundaries = _right_cell_fiber_density

                    _left_cell_fiber_density_z_score = compute_lib.z_score(
                        _left_cell_fiber_density, _average, _std)
                    _right_cell_fiber_density_z_score = compute_lib.z_score(
                        _right_cell_fiber_density, _average, _std)

                    _csv_writer.writerow([
                        _time_frame,
                        _experiment,
                        _series_id,
                        _group,
                        _left_cell_id,
                        _right_cell_id,
                        _band,
                        _fake_following,
                        _fake_static,
                        _pair_distance,
                        _offset_y,
                        _left_cell_fiber_density,
                        _left_cell_fiber_density_z_score,
                        _left_cell_out_of_boundaries,
                        _right_cell_fiber_density,
                        _right_cell_fiber_density_z_score,
                        _right_cell_out_of_boundaries
                    ])
def main(_high_temporal_resolution=False, _pair_distance_range=None):
    """Scatter per-time-frame fiber-density derivatives measured with no
    y-offset (x axis) against the same derivatives measured at ``OFFSET_Y``
    (y axis), and run a Wilcoxon test on the offset-minus-no-offset values.

    :param _high_temporal_resolution: selects the temporal-resolution category
        of experiments.
    :param _pair_distance_range: [min, max] pair distance filter; defaults to
        ``PAIR_DISTANCE_RANGE``.
    """
    if _pair_distance_range is None:
        _pair_distance_range = PAIR_DISTANCE_RANGE

    _experiments = all_experiments()
    _experiments = filtering.by_categories(
        _experiments=_experiments,
        _is_single_cell=False,
        _is_high_temporal_resolution=_high_temporal_resolution,
        _is_bleb=False,
        _is_dead_dead=False,
        _is_live_dead=False,
        _is_bead=False,
        _is_metastasis=False)
    # BUGFIX: removed leftover debug override `_experiments = ['SN16']` which
    # discarded the filtering above and made _high_temporal_resolution a no-op

    _tuples = load.experiments_groups_as_tuples(_experiments)
    _tuples = filtering.by_pair_distance_range(_tuples, _pair_distance_range)
    _tuples = filtering.by_real_pairs(_tuples)
    _tuples = filtering.by_band(_tuples)
    print('Total tuples:', len(_tuples))

    # two windows per cell: baseline (offset_y=0) and offset (OFFSET_Y)
    _arguments = []
    for _tuple in _tuples:
        _experiment, _series_id, _group = _tuple
        _latest_time_frame = compute.latest_time_frame_before_overlapping(
            _experiment, _series_id, _group, OFFSET_X)
        for _cell_id, _offset_y in product(['left_cell', 'right_cell'], [0, OFFSET_Y]):
            _arguments.append({
                'experiment': _experiment,
                'series_id': _series_id,
                'group': _group,
                'length_x': QUANTIFICATION_WINDOW_LENGTH_IN_CELL_DIAMETER,
                'length_y': QUANTIFICATION_WINDOW_HEIGHT_IN_CELL_DIAMETER,
                'length_z': QUANTIFICATION_WINDOW_WIDTH_IN_CELL_DIAMETER,
                'offset_x': OFFSET_X,
                'offset_y': _offset_y,
                'offset_z': OFFSET_Z,
                'cell_id': _cell_id,
                'direction': 'inside',
                'time_points': _latest_time_frame
            })

    _windows_dictionary, _windows_to_compute = compute.windows(
        _arguments, _keys=['experiment', 'series_id', 'group', 'cell_id', 'offset_y'])
    _fiber_densities = compute.fiber_densities(_windows_to_compute)

    # map (experiment, series, group, cell_id, offset_y) -> time series
    _experiments_fiber_densities = {
        _key: [_fiber_densities[_tuple] for _tuple in _windows_dictionary[_key]]
        for _key in _windows_dictionary
    }

    _no_offset_derivatives_array = []
    _offset_derivatives_array = []
    for _tuple in tqdm(_tuples, desc='Main loop'):
        _experiment, _series_id, _group = _tuple

        _no_offset_left_cell_fiber_densities = _experiments_fiber_densities[(
            _experiment, _series_id, _group, 'left_cell', 0)]
        _no_offset_right_cell_fiber_densities = _experiments_fiber_densities[(
            _experiment, _series_id, _group, 'right_cell', 0)]
        _offset_left_cell_fiber_densities = _experiments_fiber_densities[(
            _experiment, _series_id, _group, 'left_cell', OFFSET_Y)]
        _offset_right_cell_fiber_densities = _experiments_fiber_densities[(
            _experiment, _series_id, _group, 'right_cell', OFFSET_Y)]

        # drop blacklisted time frames for all four series
        _properties = \
            load.group_properties(_experiment, _series_id, _group)
        _no_offset_left_cell_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['left_cell'],
            _no_offset_left_cell_fiber_densities)
        _no_offset_right_cell_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['right_cell'],
            _no_offset_right_cell_fiber_densities)
        _offset_left_cell_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['left_cell'],
            _offset_left_cell_fiber_densities)
        _offset_right_cell_fiber_densities = compute.remove_blacklist(
            _experiment, _series_id, _properties['cells_ids']['right_cell'],
            _offset_right_cell_fiber_densities)

        _normalization = load.normalization_series_file_data(
            _experiment, _series_id)

        # pair each cell's no-offset series with its offset series
        for _cell_pair in [(_no_offset_left_cell_fiber_densities,
                            _offset_left_cell_fiber_densities),
                           (_no_offset_right_cell_fiber_densities,
                            _offset_right_cell_fiber_densities)]:
            for _time_frame in range(1, min(len(_cell_pair[0]), len(_cell_pair[1]))):
                _no_offset_cell_fiber_density_previous = _cell_pair[0][_time_frame - 1]
                _no_offset_cell_fiber_density = _cell_pair[0][_time_frame]
                _offset_cell_fiber_density_previous = _cell_pair[1][_time_frame - 1]
                _offset_cell_fiber_density = _cell_pair[1][_time_frame]

                # skip if any of the four windows is out of boundaries
                # (each entry is a (density, out_of_boundaries) pair)
                if any([_no_offset_cell_fiber_density_previous[1],
                        _no_offset_cell_fiber_density[1],
                        _offset_cell_fiber_density_previous[1],
                        _offset_cell_fiber_density[1]]):
                    continue

                _no_offset_cell_fiber_density_previous_z_score = compute_lib.z_score(
                    _x=_no_offset_cell_fiber_density_previous[0],
                    _average=_normalization['average'],
                    _std=_normalization['std'])
                _no_offset_cell_fiber_density_z_score = compute_lib.z_score(
                    _x=_no_offset_cell_fiber_density[0],
                    _average=_normalization['average'],
                    _std=_normalization['std'])
                _offset_cell_fiber_density_previous_z_score = compute_lib.z_score(
                    _x=_offset_cell_fiber_density_previous[0],
                    _average=_normalization['average'],
                    _std=_normalization['std'])
                _offset_cell_fiber_density_z_score = compute_lib.z_score(
                    _x=_offset_cell_fiber_density[0],
                    _average=_normalization['average'],
                    _std=_normalization['std'])

                # first-difference of the z-scored densities
                _no_offset_derivative = _no_offset_cell_fiber_density_z_score - _no_offset_cell_fiber_density_previous_z_score
                _offset_derivative = _offset_cell_fiber_density_z_score - _offset_cell_fiber_density_previous_z_score

                _no_offset_derivatives_array.append(_no_offset_derivative)
                _offset_derivatives_array.append(_offset_derivative)

    print('Total points:', len(_no_offset_derivatives_array))
    _offset_minus_no_offset = \
        np.array(_offset_derivatives_array) - np.array(_no_offset_derivatives_array)
    print('Wilcoxon of offset minus no offset around the zero:')
    print(wilcoxon(_offset_minus_no_offset))
    print('Higher offset amount:',
          (_offset_minus_no_offset > 0).sum() / len(_offset_minus_no_offset))

    # plot (commented-out stale layout options removed)
    _fig = go.Figure(
        data=go.Scatter(
            x=_no_offset_derivatives_array,
            y=_offset_derivatives_array,
            mode='markers',
            marker={
                'size': 5,
                'color': '#ea8500'
            },
            showlegend=False),
        layout={
            'xaxis': {
                'title': 'No offset derivative'
            },
            'yaxis': {
                'title': 'Offset derivative'
            },
            # red y = x reference line
            'shapes': [
                {
                    'type': 'line',
                    'x0': -5,
                    'y0': -5,
                    'x1': 5,
                    'y1': 5,
                    'line': {
                        'color': 'red',
                        'width': 2
                    }
                }
            ]
        })

    save.to_html(
        _fig=_fig,
        _path=os.path.join(paths.PLOTS, save.get_module_name()),
        _filename='plot')