def test_finder_with_unknown_events(finder: BaseFinder, imported_data: ImportedData,
                                    parameters: list, plot_reconnection: bool = True,
                                    interval: int = 24) -> List[list]:
    """
    Returns the possible reconnection times as well as the distance from the sun at this time.

    :param finder: method to find the reconnection events, right now CorrelationFinder
    :param imported_data: ImportedData
    :param parameters: parameters that will be used in the finder
    :param plot_reconnection: if True, every time a reconnection is detected, it is plotted
    :param interval: interval (hours) over which the data is analysed to detect events
    :return: list of [event datetime, radius from the Sun] pairs
    """
    duration = imported_data.duration
    start = imported_data.start_datetime
    probe = imported_data.probe
    reconnection_events = []
    # Walk through the data in `interval`-hour windows.
    # np.int was removed in modern NumPy; the builtin int is equivalent here.
    for _ in range(int(duration / interval)):
        try:
            data = get_probe_data(probe=probe, start_date=start.strftime('%d/%m/%Y'),
                                  start_hour=start.hour, duration=interval)
            reconnection = finder.find_magnetic_reconnections(data, *parameters)
            for event in reconnection:
                radius = data.data['r_sun'].loc[event]
                reconnection_events.append([event, radius])
            if reconnection and plot_reconnection:
                plot_imported_data(data, DEFAULT_PLOTTED_COLUMNS + [
                    ('correlation_sum', 'correlation_sum_outliers'),
                    ('correlation_diff', 'correlation_diff_outliers')])
        except Exception as exc:
            # Best-effort: keep scanning remaining windows, but say why this one failed
            # (the original swallowed the exception details entirely).
            print('Exception in test_finder_with_unknown_events:', exc)
        start = start + timedelta(hours=interval)
    return reconnection_events
def plot_temperature_as_function_of_dist(events: List[List[Union[datetime, int]]]):
    """
    Plots the exhaust temperature of each event against its distance from the sun.

    :param events: list of [event datetime, probe] pairs to analyse
    :return: None (shows a matplotlib figure)
    """
    temp, dist = [], []
    for event, probe in events:
        try:
            start = event - timedelta(hours=2)
            imported_data = get_probe_data(probe=probe,
                                           start_date=start.strftime('%d/%m/%Y'),
                                           start_hour=start.hour, duration=4)
            imported_data.data.dropna(inplace=True)
            # .iloc[0] is explicit positional access; `[0]` on a datetime-indexed
            # Series (as in the original) is deprecated/removed in modern pandas.
            radius = imported_data.data.loc[
                event - timedelta(minutes=4):event, 'r_sun'].iloc[0]
            duration, event_start, event_end = find_intervals(imported_data, event)
            left_interval_end, right_interval_start = event_start, event_end
            perpendicular_temperature = imported_data.data['Tp_perp']
            parallel_temperature = imported_data.data['Tp_par']
            # Isotropic-equivalent proton temperature: (2*T_perp + T_par) / 3
            total_temperature = (2 * perpendicular_temperature + parallel_temperature) / 3
            # 90th percentile of the temperature inside the exhaust interval
            t_exhaust = np.percentile(
                total_temperature.loc[left_interval_end:right_interval_start].values, 90)
            print(event, probe, t_exhaust)
            dist.append(radius)
            temp.append(t_exhaust)
        except ValueError:
            print('Value error')
    plt.plot(dist, temp, '.')
    plt.title('Exhaust temperature against distance from the sun')
    plt.xlabel('Distance from the sun [AU]')
    plt.ylabel('Exhaust temperature [K]')
    plt.show()
def send_dates_to_csv(filename: str, events_list: List[datetime], probe: int,
                      add_radius: bool = True):
    """
    Writes a list of event dates (and optionally the probe's radial distance) to a csv file.

    :param filename: name of the output file (extension '.csv' is appended)
    :param events_list: list of events to send to csv
    :param probe: probe corresponding to the events
    :param add_radius: if True, adds the position of the probe at each event
    :return: None
    """
    with open(filename + '.csv', 'w', newline='') as csv_file:
        fieldnames = ['year', 'month', 'day', 'hours', 'minutes', 'seconds']
        if add_radius:
            fieldnames.append('radius')
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        writer.writeheader()
        for reconnection_date in events_list:
            # Build the row once; the original duplicated writerow for the
            # add_radius / no-radius cases.
            row = {'year': reconnection_date.year,
                   'month': reconnection_date.month,
                   'day': reconnection_date.day,
                   'hours': reconnection_date.hour,
                   'minutes': reconnection_date.minute,
                   'seconds': reconnection_date.second}
            if add_radius:
                start = reconnection_date - timedelta(hours=1)
                imported_data = get_probe_data(probe=probe,
                                               start_date=start.strftime('%d/%m/%Y'),
                                               start_hour=start.hour, duration=2)
                # .iloc[0]: positional access; `[0]` on a datetime-indexed Series
                # (as in the original) is deprecated/removed in modern pandas.
                row['radius'] = imported_data.data['r_sun'].loc[
                    reconnection_date - timedelta(minutes=1):
                    reconnection_date + timedelta(minutes=1)].iloc[0]
            writer.writerow(row)
def get_data(dates: list, probe: int = 2) -> List[ImportedData]:
    """
    Gets the data as ImportedData for the given start and end dates (a lot of data is missing for Helios 1).

    :param dates: list of start and end dates when the spacecraft is at a location smaller than the given radius
    :param probe: 1 or 2 for Helios 1 or 2, can also be 'ulysses' or 'imp_8'
    :return: a list of ImportedData for the given dates
    """
    imported_data = []
    for date_pair in dates:
        start, end = date_pair[0], date_pair[1]
        delta_t = end - start
        # np.int was removed in modern NumPy; the builtin int is equivalent here.
        hours = int(delta_t.total_seconds() / 3600)
        start_date = start.strftime('%d/%m/%Y')
        try:
            _data = get_probe_data(probe=probe, start_date=start_date, duration=hours)
            imported_data.append(_data)
        except Exception:
            # Whole-range download failed: fall back to fetching one day at a time.
            print('Previous method not working, switching to "day-to-day" method')
            hard_to_get_data = []
            interval = 24
            number_of_loops = int(hours / interval)
            for _ in range(number_of_loops):
                try:
                    hard_data = get_probe_data(probe=probe,
                                               start_date=start.strftime('%d/%m/%Y'),
                                               duration=interval)
                    hard_to_get_data.append(hard_data)
                except Exception:
                    potential_end_time = start + timedelta(hours=interval)
                    print('Not possible to download data between ' + str(start) +
                          ' and ' + str(potential_end_time))
                start = start + timedelta(hours=interval)
            # BUG FIX: the original appended hard_to_get_data[n] (the stale outer
            # index) on every iteration, duplicating one chunk and dropping the
            # rest; append each successfully downloaded chunk instead.
            imported_data.extend(hard_to_get_data)
    return imported_data
def mcc_from_parameters(mcc_parameters: dict, finder: BaseFinder = CorrelationFinder(),
                        event_list=events_list) -> List[Union[float, dict]]:
    """
    Returns the mcc with corresponding sigma_sum, sigma_diff and minutes_b.

    :param mcc_parameters: dictionary of the parameters to be tested
    :param finder: finder to be used in the tests
    :param event_list: list of events from which the mcc is calculated
    :return: list containing the mcc and associated parameters
    """
    f_n, t_n, t_p, f_p = 0, 0, 0, 0
    for event, probe, reconnection_number in event_list:
        print(event, reconnection_number)
        interval = 3
        start_time = event - timedelta(hours=interval / 2)
        # NOTE(review): start_hour uses event.hour rather than start_time.hour —
        # confirm this is intended (kept as in the original).
        data = get_probe_data(probe=probe, start_date=start_time.strftime('%d/%m/%Y'),
                              start_hour=event.hour, duration=interval)
        # Making sure this function can possibly be used with other finders:
        # the last two parameter values feed the lmn test, the rest feed the finder.
        split_of_params = len(mcc_parameters) - 2
        # dicts preserve insertion order, so .values() matches the original
        # [d[k] for k in list(d.keys())] exactly.
        list_of_params = list(mcc_parameters.values())
        reconnection_corr = finder.find_magnetic_reconnections(
            data, *list_of_params[:split_of_params])
        reconnection = test_reconnection_lmn(reconnection_corr, probe,
                                             *list_of_params[split_of_params:])
        if reconnection_number == 0:
            if len(reconnection) == 0:  # nothing detected, which is good
                t_n += 1
            else:  # too many things detected
                f_p += len(reconnection)
        elif len(reconnection) < reconnection_number:  # not enough detected
            f_n += reconnection_number - len(reconnection)
            t_p += len(reconnection)
            print(reconnection_number, len(reconnection))
        elif len(reconnection) == reconnection_number:  # just enough events detected
            t_p += len(reconnection)
        else:  # more detected than real
            f_p += len(reconnection) - reconnection_number
            t_p += reconnection_number
    print(f' true positives: {t_p}\n true negatives: {t_n}\n false positives: {f_p}\n false negatives: {f_n}')
    mcc_value = get_mcc(t_p, t_n, f_p, f_n)
    print('MCC', mcc_value, mcc_parameters)
    return [mcc_value, mcc_parameters]
def hybrid_mva(event_date, probe, duration: int = 4, outside_interval: int = 10,
               inside_interval: int = 2,
               mva_interval: int = 30) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """
    Performs a hybrid minimum-variance analysis around an event and returns the LMN axes.

    :param event_date: date of the event to analyse
    :param probe: probe that detected the event
    :param duration: hours of data downloaded around the event
    :param outside_interval: interval outside the event used for the side data
    :param inside_interval: interval inside the event used for the side data
    :param mva_interval: interval used for the minimum-variance analysis
    :return: the L, M and N vectors
    """
    window_start = event_date - timedelta(hours=duration / 2)
    probe_data = get_probe_data(probe=probe,
                                start_date=window_start.strftime('%d/%m/%Y'),
                                start_hour=window_start.hour, duration=duration)
    probe_data.data.dropna(inplace=True)
    # MVA on the field around the event, then refine L with the hybrid method
    # using the magnetic field on both sides of the event.
    magnetic_field = get_b(probe_data, event_date, interval=mva_interval)
    L, M, N = mva(magnetic_field)
    b1, b2, _v1, _v2, _n1, _n2, _tpar1, _tperp1, _tpar2, _tperp2 = get_side_data(
        probe_data, event_date, outside_interval, inside_interval)
    L, M, N = hybrid(L, b1, b2)
    return L, M, N
def n_to_shear():
    """
    Plots relationships between solar wind characteristics.
    :return:
    """
    density, angle, guide = [], [], []
    events = create_events_list_from_csv_files([['helios1_magrec2.csv', 1],
                                                ['helios1mag_rec3.csv', 1]])
    events += create_events_list_from_csv_files([['helios2_magrec2.csv', 2],
                                                 ['helios2mag_rec3.csv', 2]])
    for event in events:
        print(event)
        event_date, probe = event[0], event[1]
        start = event_date - timedelta(hours=2)
        imported_data = get_probe_data(probe=probe,
                                       start_date=start.strftime('%d/%m/%Y'),
                                       start_hour=start.hour, duration=4)
        imported_data.data.dropna(inplace=True)
        duration, event_start, event_end = find_intervals(imported_data, event_date)
        left_interval_end, right_interval_start = event_start, event_end
        left_interval_start = event_start - timedelta(minutes=5)
        right_interval_end = event_end + timedelta(minutes=5)
        b_l_left, b_l_right, b_m_left, b_m_right, n_left, n_right = get_n_b(
            event_date, probe, imported_data, left_interval_start, left_interval_end,
            right_interval_start, right_interval_end, guide_field=True)
        # Guide-field proxy: ratio of the summed L components to the summed M components
        b_g = (b_l_left + b_l_right) / (b_m_left + b_m_right)
        shear, a, b, c = get_shear_angle([event])
        if shear:
            angle.append(shear[0])
            density.append((n_left + n_right) / 2)
            guide.append(b_g)
    plt.scatter(angle, density)
    plt.show()
def plot_walen_test(event_date: datetime, probe: int, duration: int = 4,
                    outside_interval: int = 10,
                    inside_interval: int = 2) -> List[List[Union[str, datetime, float]]]:
    """
    Plots the expected Alfven speed at the boundaries of the exhaust (work in progress).

    :param event_date: date of the reconnection event
    :param probe: probe that detected the event
    :param duration: duration that we download the data for
    :param outside_interval: interval outside the event that we consider the data for
    :param inside_interval: interval inside the event that we consider the data for
    :return: Alfven speed at the exhaust
    """
    start_time = event_date - timedelta(hours=duration / 2)
    imported_data = get_probe_data(probe=probe,
                                   start_date=start_time.strftime('%d/%m/%Y'),
                                   start_hour=start_time.hour, duration=duration)
    imported_data.data.dropna(inplace=True)
    b = get_b(imported_data, event_date, 30)
    L, M, N = mva(b)
    b1, b2, v1, v2, density_1, density_2, T_par_1, T_perp_1, T_par_2, T_perp_2 = get_side_data(
        imported_data, event_date, outside_interval, inside_interval)
    L, M, N = hybrid(L, b1, b2)
    # BUG FIX: logging methods take a %-style format string; the original passed
    # bare positional values, which raises a formatting error when DEBUG is enabled.
    logger.debug('LMN: %s %s %s', L, M, N)
    b1_changed, b2_changed, v1_changed, v2_changed = change_b_and_v(
        b1, b2, v1, v2, L, M, N)
    # Only the L components are needed here (M components were unused locals).
    b1_L, b2_L = b1_changed[0], b2_changed[0]
    v1_L, v2_L = v1_changed[0], v2_changed[0]
    theoretical_v2_plus, theoretical_v2_minus = get_alfven_speed(
        b1_L, b2_L, v1_L, v2_L, density_1, density_2)
    theoretical_time = event_date + timedelta(minutes=inside_interval / 2)
    return [['v_l', theoretical_time, theoretical_v2_plus],
            ['v_l', theoretical_time, theoretical_v2_minus]]
def get_shear_angle(events_list: List[List[Union[datetime, int]]]
                    ) -> Tuple[List[np.ndarray], List[list], List[list], List[list]]:
    """
    Finds the shear angle of events.

    :param events_list: list of events to be analysed
    :return: shear angles, and lists of events with low, medium and high shear angles
    """
    shear, small_shear, big_shear, medium_shear = [], [], [], []
    probe_for_title = []

    def _mean_b(data, interval_start, interval_end) -> np.ndarray:
        # Average magnetic field vector over [interval_start, interval_end].
        return np.array([np.mean(data.loc[interval_start:interval_end, component].values)
                         for component in ('Bx', 'By', 'Bz')])

    for event, probe in events_list:
        print(event)
        if probe not in probe_for_title:
            probe_for_title.append(probe)
        start = event - timedelta(hours=1)
        imported_data = get_probe_data(probe=probe,
                                       start_date=start.strftime('%d/%m/%Y'),
                                       start_hour=start.hour, duration=2)
        imported_data.data.dropna(inplace=True)
        duration, event_start, event_end = find_intervals(imported_data, event)
        left_interval_end, right_interval_start = event_start, event_end
        left_interval_start = event_start - timedelta(minutes=5)
        right_interval_end = event_end + timedelta(minutes=5)
        b_left = _mean_b(imported_data.data, left_interval_start, left_interval_end)
        b_right = _mean_b(imported_data.data, right_interval_start, right_interval_end)
        # BUG FIX (naming only): the original computed br_mag from b_left and
        # bl_mag from b_right; the product is symmetric so the angle was unaffected.
        bl_mag = np.sqrt(np.sum(b_left ** 2))
        br_mag = np.sqrt(np.sum(b_right ** 2))
        # Shear angle between the two field vectors, in degrees.
        theta = np.degrees(np.arccos(np.dot(b_right, b_left) / (bl_mag * br_mag)))
        if not np.isnan(theta):
            shear.append(theta)
            if theta <= 90:
                small_shear.append([event, probe])
            elif theta > 135:
                big_shear.append([event, probe])
            else:
                medium_shear.append([event, probe])
    print('shear', shear)
    return shear, small_shear, big_shear, medium_shear
def temperature_analysis(events: List[List[Union[datetime, int]]]) -> List[float]:
    """
    Analyses the actual and theoretical temperature increases.

    :param events: list of reconnection events dates and associated probes
    :return: relations between theoretical and actual temperature increases
    """
    satisfied_test = 0
    use_2_b = True  # if True, use both sides' B and n separately; otherwise their averages
    print(len(events))
    total_t, par_t, perp_t, t_diff = [], [], [], []
    shear, small_shear, big_shear, medium_shear = get_shear_angle(events)
    for event, probe in events:
        print(event, probe)
        try:
            start = event - timedelta(hours=2)
            imported_data = get_probe_data(probe=probe,
                                           start_date=start.strftime('%d/%m/%Y'),
                                           start_hour=start.hour, duration=4)
            imported_data.data.dropna(inplace=True)
            # .iloc[0] is positional access; `[0]` on a datetime-indexed Series
            # (as in the original) is deprecated/removed in modern pandas.
            radius = imported_data.data.loc[
                event - timedelta(minutes=4):event, 'r_sun'].iloc[0]
            duration, event_start, event_end = find_intervals(imported_data, event)
            left_interval_end, right_interval_start = event_start, event_end
            left_interval_start = event_start - timedelta(minutes=5)
            right_interval_end = event_end + timedelta(minutes=5)
            b_l_left, b_l_right, n_left, n_right, L, M, N = get_n_b(
                event, probe, imported_data, left_interval_start, left_interval_end,
                right_interval_start, right_interval_end)
            if use_2_b:
                b_l, n = [b_l_left, b_l_right], [n_left, n_right]
            else:
                b_l, n = [(b_l_left + b_l_right) / 2], [(n_left + n_right) / 2]
            delta_t, dt_perp, dt_par = find_temperature(
                imported_data, b_l, n, left_interval_start, left_interval_end,
                right_interval_start, right_interval_end)
            predicted_increase, alfven_speed = find_predicted_temperature(b_l, n)
            # 0.13 is the empirical fraction of mv_A^2 expected to go into heating.
            if 0.8 * delta_t <= predicted_increase * 0.13 <= 1.2 * delta_t:
                satisfied_test += 1
            if radius < 0.5:
                if [event, probe] in small_shear:
                    s = 'small'
                elif [event, probe] in big_shear:
                    s = 'big'
                else:
                    s = 'medium'
                print('small radius', radius, s)
            if delta_t > 15:
                print('DELTA T > 15', delta_t, event, radius)
            if delta_t < 0:
                print('delta t smaller than 0 ', delta_t, event, radius)
            # Classify once and append uniformly; the original duplicated these four
            # appends per shear class and was inconsistent about trailing spaces in
            # the labels ('small shear' vs 'small shear ').
            if [event, probe] in small_shear:
                colour, label = 'r', 'small shear'
            elif [event, probe] in big_shear:
                colour, label = 'b', 'big shear'
            else:
                colour, label = 'g', 'medium shear'
            total_t.append([predicted_increase, delta_t, colour, label])
            par_t.append([predicted_increase, dt_par, colour, label])
            perp_t.append([predicted_increase, dt_perp, colour, label])
            t_diff.append([dt_par, dt_perp, colour, label])
        except ValueError:
            print('value error')
    print('satisfied test: ', satisfied_test)
    slopes = plot_relations(
        [[total_t, 'Proton temperature change versus ' + r'$mv_A^2$'],
         [par_t, 'Parallel proton temperature change versus ' + r'$mv_A^2$'],
         [perp_t, 'Perpendicular proton temperature change versus ' + r'$mv_A^2$'],
         [t_diff, 'Perpendicular versus parallel proton temperature changes']], 0.13)
    return slopes
color=colour) x_format = md.DateFormatter('%d/%m \n %H:%M') ax.xaxis.set_major_formatter(x_format) if column_name == 'Tp_par': ax.yaxis.set_label_position("right") ax.xaxis.set_ticklabels([]) ax.set_ylabel(tex_escape(column_name), color=colour) ax.grid() if __name__ == '__main__': # data = get_probe_data(probe='wind', start_date='01/01/2002', start_hour=12, duration=15) # data = get_probe_data(probe=1, start_date='01/12/1976', start_hour=4, duration=5) # data = get_probe_data(probe=1, start_date='05/03/1975', start_hour=0, duration=7) data = get_probe_data(probe=1, start_date='19/01/1979', start_hour=20, duration=3) # data = get_probe_data(probe=1, start_date='29/05/1981', start_hour=12, duration=6) # data = get_probe_data(probe='ulysses', start_date='09/02/1998', duration=24) # data = get_probe_data(probe='ulysses', start_date='15/02/2003', start_hour=20, duration=6) # plot_imported_data(data, columns_to_plot=DEFAULT_PLOTTED_COLUMNS, # boundaries=[datetime(1976, 12, 1, 5, 49), datetime(1976, 12, 1, 6, 12), # datetime(1976, 12, 1, 7, 16), # datetime(1976, 12, 1, 6, 23), datetime(1976, 12, 1, 7, 31)]) plot_imported_data(data, columns_to_plot=DEFAULT_PLOTTED_COLUMNS, boundaries=[datetime(1979, 1, 19, 21, 27)]) # plot_imported_data(data, columns_to_plot=['n_p', ('Bx', 'vp_x'), ('By', 'vp_y'), ('Bz', 'vp_z'), # ('b_magnitude', 'vp_magnitude')])
def plot_current_sheet(event: List[np.ndarray], weird: List[np.ndarray],
                       event_date: datetime, weird_date: datetime, probe: int):
    """
    Plots the current sheets and the spacecraft trajectory between them.

    :param event: LMN coordinates for event
    :param weird: LMN coordinates for weird event
    :param event_date: event date
    :param weird_date: weird event date
    :param probe: 1 or 2 for Helios 1 or 2
    :return:
    """
    # Order the two events chronologically; `future` records the travel direction.
    if weird_date < event_date:
        start = weird_date
        first, end = weird, event
        print('EVENT IN MAGENTA, WEIRD IN BLUE')
        future = False
    else:
        start = event_date
        first, end = event, weird
        print('EVENT IN BLUE, WEIRD IN MAGENTA')
        future = True
    start_date = start - timedelta(hours=1)
    imported_data = get_probe_data(probe=probe,
                                   start_date=start_date.strftime('%d/%m/%Y'),
                                   start_hour=start_date.hour, duration=3)
    imported_data.create_processed_column('vp_magnitude')
    # Distance travelled between the two events: mean speed * elapsed time.
    t = np.abs((weird_date - event_date).total_seconds())
    v = np.mean(imported_data.data.loc[start:start + timedelta(seconds=t),
                                       'vp_magnitude'])
    distance = t * v
    print('distance', distance)
    fig = plt.figure(1)
    ax = fig.add_subplot(111, projection='3d', aspect='equal')
    plt.title(str(event_date), y=1.05)
    ax.set_xlabel('$X$', rotation=150)
    ax.set_ylabel('$Y$')
    ax.set_zlabel('$Z$', rotation=60)
    ax.xaxis.set_ticklabels([])
    ax.yaxis.set_ticklabels([])
    ax.zaxis.set_ticklabels([])
    # Third LMN vector of each event is taken as the plane normal.
    normal_1, normal_2 = first[2], end[2]
    d = find_d_from_distance(distance, normal_2)
    # np.int was removed in modern NumPy; the builtin int is equivalent here.
    xx, yy = np.meshgrid(
        np.arange(0, 3 * distance, int(distance)) - 1.5 * distance,
        np.arange(0, 3 * distance, int(distance)) - 1.5 * distance)
    # Plane equations solved for z: n.x * x + n.y * y + n.z * z (+ d) = 0.
    z1 = (-normal_1[0] * xx - normal_1[1] * yy) * 1. / normal_1[2]
    if future:
        z2 = (-normal_2[0] * xx - normal_2[1] * yy - d) * 1. / normal_2[2]
    else:
        z2 = (-normal_2[0] * xx - normal_2[1] * yy + d) * 1. / normal_2[2]
    ax.plot_surface(xx, yy, z1, alpha=0.2, color='b')
    ax.plot_surface(xx, yy, z2, alpha=0.5, color='m')
    starting_position = [0, 0, 0]
    trajectory = find_spacecraft_trajectory(imported_data, t, start,
                                            starting_position, future)
    x = [pos[0] for pos in trajectory]
    y = [pos[1] for pos in trajectory]
    z = [pos[2] for pos in trajectory]
    ax.scatter(x, y, z)
    ax.scatter(x[0], y[0], z[0], color='k')
    ax.scatter(x[9], y[9], z[9], color='k')
    # Signed distance from the trajectory's last point to the second plane.
    distance_plane_to_point = (
        normal_2[0] * x[-1] + normal_2[1] * y[-1] + normal_2[2] * z[-1] + d
    ) / np.sqrt(normal_2[0] ** 2 + normal_2[1] ** 2 + normal_2[2] ** 2)
    print('distance from 2: ', distance_plane_to_point)
    b_and_v_plotting(ax, imported_data, event_date, weird_date, starting_position,
                     future, event, weird)
    add_m_n_vectors(ax, event, weird, distance, future)
    plt.show()
def test_reconnection_lmn(event_dates: List[datetime], probe: Union[int, str], minimum_fraction: float, maximum_fraction: float, plot: bool = False, mode: str = 'static') -> List[datetime]: """ Checks a list of type datetime to determine whether they are reconnection events :param event_dates: list of possible reconnection dates :param probe: probe to be analysed :param minimum_fraction: minimum walen fraction :param maximum_fraction: maximum walen fraction :param plot: bool, true of we want to plot reconnection events that passed the test :param mode: interactive (human input to the code, more precise but time consuming) or static (purely computational) :return: all events that managed to pass the lmn tests """ implemented_modes = ['static', 'interactive'] if mode not in implemented_modes: raise NotImplementedError('This mode is not implemented.') duration = 4 events_that_passed_test = [] known_events = [] # get_dates_from_csv('helios2_magrec2.csv') rogue_events = [] # if mode == 'interactive' for event_date in event_dates: try: start_time = event_date - timedelta(hours=duration / 2) imported_data = get_probe_data( probe=probe, start_date=start_time.strftime('%d/%m/%Y'), start_hour=start_time.hour, duration=duration) imported_data.data.dropna(inplace=True) if probe == 1 or probe == 2 or probe == 'imp_8' or probe == 'ace' or probe == 'wind': b = get_b(imported_data, event_date, 30) L, M, N = mva(b) b1, b2, v1, v2, density_1, density_2, t_par_1, t_perp_1, t_par_2, t_perp_2 = get_side_data( imported_data, event_date, 10, 2) min_len = 70 elif probe == 'ulysses': b = get_b(imported_data, event_date, 60) L, M, N = mva(b) b1, b2, v1, v2, density_1, density_2, t_par_1, t_perp_1, t_par_2, t_perp_2 = get_side_data( imported_data, event_date, 30, 10) min_len = 5 else: raise NotImplementedError( 'The probes that have been implemented so far are Helios 1, Helios 2, Imp 8, Ace, Wind and Ulysses' ) L, M, N = hybrid(L, b1, b2) logger.debug('LMN:', L, M, N, np.dot(L, M), np.dot(L, 
N), np.dot(M, N), np.dot(np.cross(L, M), N)) b1_changed, b2_changed, v1_changed, v2_changed = change_b_and_v( b1, b2, v1, v2, L, M, N) b1_L, b2_L, b1_M, b2_M = b1_changed[0], b2_changed[0], b1_changed[ 1], b2_changed[1] v1_L, v2_L = v1_changed[0], v2_changed[0] walen = walen_test(b1_L, b2_L, v1_L, v2_L, density_1, density_2, minimum_fraction, maximum_fraction) bl_check = b_l_biggest(b1_L, b2_L, b1_M, b2_M) b_and_v_checks = changes_in_b_and_v(b1_changed, b2_changed, v1_changed, v2_changed, imported_data, event_date, L) logger.debug(walen, bl_check, b_and_v_checks) if walen and bl_check and len( imported_data.data ) > min_len and b_and_v_checks: # avoid not enough data points logger.info('RECONNECTION ON ', str(event_date)) if mode == 'static': events_that_passed_test.append(event_date) elif mode == 'interactive': answered = False plot_lmn(imported_data, L, M, N, event_date, probe, boundaries=None) while not answered: is_event = str(input('Do you think this is an event?')) is_event.lower() if is_event[0] == 'y': answered = True events_that_passed_test.append(event_date) elif is_event[0] == 'n': answered = True rogue_events.append(event_date) else: print('Please reply by yes or no') if plot and mode == 'static' and event_date not in known_events: plot_lmn(imported_data, L, M, N, event_date, probe=probe, save=True) else: logger.info('NO RECONNECTION ON ', str(event_date)) except ValueError: logger.debug('could not take care of mva analysis') if mode == 'interactive': print('rogue events: ', rogue_events) return events_that_passed_test