def process_bs_hist_stats(fps_bs, bs_params, analysis_fp=None, update_previous_analysis_fp=None): p_bs = bs_params if update_previous_analysis_fp!=None: f = h5py.File(update_previous_analysis_fp,'r') hist_stats = f['analysis']['bs_hist_stats'].value total_hist = f['analysis']['bs_total_hist'].value.T total_ent_events_fps = f['analysis']['total_ent_events_fps'].value noofs_fps = len(fps_bs) fps_bs = np.setdiff1d(fps_bs,total_ent_events_fps) noof_old_fps = noofs_fps - len(fps_bs) hist_stats = np.resize(hist_stats,(noofs_fps, be._bs_hist_stats_noof_columns)) f.close() else: hist_stats = np.zeros((len(fps_bs), be._bs_hist_stats_noof_columns), dtype=be._bs_hist_stats_dtype) total_hist = None noof_old_fps = 0 print 'Processing tail {} files'.format(len(fps_bs)) for i,fp in enumerate(fps_bs): print i, hi_0, hi_1, hist_stats[i+noof_old_fps,:] = be.get_bs_hist_stats(fp, st_start_ch0 = p_bs['st_start_ch0'], st_start_ch1 = p_bs['st_start_ch1'], st_len = p_bs['st_len'], pulse_sep = p_bs['pulse_sep'], st_pulse_start = p_bs['st_pulse_start'], st_pulse_len = p_bs['st_pulse_len'], hist_binsize_ps =p_bs['hist_binsize_ps'] ) if total_hist == None: total_hist = np.vstack((hi_0,hi_1)) else: total_hist += np.vstack((hi_0,hi_1)) if analysis_fp!=None: tb.set_analysis_data(analysis_fp, 'bs_hist_stats', data=hist_stats, attributes=[], permissions='a') tb.set_analysis_data(analysis_fp, 'bs_total_hist', data=total_hist.T, attributes=[], permissions='a') print 'Done!' return total_hist.T,hist_stats
def process_afterpulsing_data(fps_bs, afterpulsing_params, analysis_fp=None, update_previous_analysis_fp=None): p_ap = afterpulsing_params if update_previous_analysis_fp!=None: f = h5py.File(update_previous_analysis_fp,'r') all_afterpulsing = f['analysis']['afterpulsing'].value total_ent_events_fps = f['analysis']['total_ent_events_fps'].value noofs_fps = len(fps_bs) fps_bs = np.setdiff1d(fps_bs,total_ent_events_fps) noof_old_fps = noofs_fps - len(fps_bs) all_afterpulsing = np.resize(all_afterpulsing,(noofs_fps, be._bs_afterpulsing_noof_columns)) f.close() else: all_afterpulsing = np.zeros((len(fps_bs),be._bs_afterpulsing_noof_columns), dtype=be._bs_afterpulsing_dtype) noof_old_fps = 0 print 'Processing afterpulsing {} files'.format(len(fps_bs)) for i,fp in enumerate(fps_bs): print i, all_afterpulsing[i] += be.get_bs_afterpulsing(fp, first_st_start = p_ap['first_st_start'], first_st_len = p_ap['first_st_len'], after_st_start = p_ap['after_pulse_st_start'], after_st_len = p_ap['after_pulse_st_len']) if analysis_fp!=None: tb.set_analysis_data(analysis_fp, 'afterpulsing', data=all_afterpulsing, attributes=[], permissions='a') print 'Done!' return all_afterpulsing
def process_tpqi_data(fps_bs, bs_params, analysis_fp=None, update_previous_analysis_fp=None): p_bs = bs_params if update_previous_analysis_fp != None: f = h5py.File(update_previous_analysis_fp, 'r') all_coincidences = f['analysis']['tpqi'].value total_ent_events_fps = f['analysis']['total_ent_events_fps'].value fps_bs = np.setdiff1d(fps_bs, total_ent_events_fps) f.close() else: all_coincidences = np.empty((0, be._tpqi_noof_columns), dtype=be._tpqi_dtype) print 'Processing TPQI {} files'.format(len(fps_bs)) for i, fp in enumerate(fps_bs): print i, coincidences = be.get_coincidences(fp, st_start_ch0=p_bs['st_start_ch0'], st_start_ch1=p_bs['st_start_ch1'], st_len=p_bs['st_len'], pulse_sep=p_bs['pulse_sep']) all_coincidences = np.vstack((all_coincidences, coincidences)) if analysis_fp != None: tb.set_analysis_data(analysis_fp, 'tpqi', data=all_coincidences, attributes=[], permissions='a') print 'Done!' return all_coincidences
def process_lt_stats(fps_lt, lt_params, lt3, analysis_fp=None, update_previous_analysis_fp=None): p_lt = lt_params psb_tail_start = p_lt['psb_tail_start_lt3'] if lt3 else p_lt['psb_tail_start_lt4'] psb_prepulse_start = p_lt['psb_prepulse_start_lt3'] if lt3 else p_lt['psb_prepulse_start_lt4'] if update_previous_analysis_fp!=None: f = h5py.File(update_previous_analysis_fp,'r') all_lt_stats = f['analysis']['lt3_stats'].value if lt3 else f['analysis']['lt4_stats'].value total_lt_fps = f['analysis']['total_lt3_ssro_fps'].value if lt3 else f['analysis']['total_lt4_ssro_fps'].value noofs_fps = len(fps_lt) fps_lt = np.setdiff1d(fps_lt,total_lt_fps) noof_old_fps = noofs_fps - len(fps_lt) all_lt_stats = np.resize(all_lt_stats,(noofs_fps, be._lt_stats_noof_columns)) f.close() else: all_lt_stats = np.zeros((len(fps_lt),be._lt_stats_noof_columns), dtype=be._lt_stats_dtype) noof_old_fps = 0 print 'Processing LT {} files'.format(len(fps_lt)) for i,fp in enumerate(fps_lt): print i, all_lt_stats[i+noof_old_fps,:]= be.get_lt_stats(fp, ro_start = p_lt['ro_start'], ro_length = p_lt['ro_length'], ro_channel = p_lt['ro_channel'], rnd_start = p_lt['rnd_start'], rnd_length = p_lt['rnd_length'], rnd_0_channel = p_lt['rnd_0_channel'], rnd_1_channel = p_lt['rnd_1_channel'], psb_tail_start = psb_tail_start, psb_tail_len = p_lt['psb_tail_len'], pulse_sep = p_lt['pulse_sep'], psb_prepulse_start = psb_prepulse_start, psb_prepulse_len = p_lt['psb_prepulse_len']) if analysis_fp!=None: tb.set_analysis_data(analysis_fp, 'lt3_stats' if lt3 else 'lt4_stats', data=all_lt_stats, attributes=[], permissions='a') print 'Done!' return all_lt_stats
def process_lt_stats(fps_lt, lt_params, lt3, analysis_fp=None,
                     update_previous_analysis_fp=None):
    # NOTE(review): this is a duplicate of an earlier process_lt_stats
    # definition in this module; being defined later, this copy is the one
    # in effect at import time.
    """Collect per-file LT statistics for the LT3 (lt3=True) or LT4 setup."""
    p_lt = lt_params
    # Tail/prepulse window starts differ per setup.
    psb_tail_start = p_lt['psb_tail_start_lt3'] if lt3 else p_lt[
        'psb_tail_start_lt4']
    psb_prepulse_start = p_lt['psb_prepulse_start_lt3'] if lt3 else p_lt[
        'psb_prepulse_start_lt4']
    if update_previous_analysis_fp != None:
        # Resume from an earlier analysis: reload its statistics and keep
        # only the not-yet-analyzed filepaths.
        f = h5py.File(update_previous_analysis_fp, 'r')
        all_lt_stats = f['analysis']['lt3_stats'].value if lt3 else f[
            'analysis']['lt4_stats'].value
        total_lt_fps = f['analysis']['total_lt3_ssro_fps'].value if lt3 else f[
            'analysis']['total_lt4_ssro_fps'].value
        noofs_fps = len(fps_lt)
        fps_lt = np.setdiff1d(fps_lt, total_lt_fps)
        noof_old_fps = noofs_fps - len(fps_lt)
        # Grow the array to hold old plus new rows.
        all_lt_stats = np.resize(all_lt_stats,
                                 (noofs_fps, be._lt_stats_noof_columns))
        f.close()
    else:
        all_lt_stats = np.zeros((len(fps_lt), be._lt_stats_noof_columns),
                                dtype=be._lt_stats_dtype)
        noof_old_fps = 0
    print 'Processing LT {} files'.format(len(fps_lt))
    for i, fp in enumerate(fps_lt):
        print i,
        # New rows are written after those retained from the old analysis.
        all_lt_stats[i + noof_old_fps, :] = be.get_lt_stats(
            fp,
            ro_start=p_lt['ro_start'],
            ro_length=p_lt['ro_length'],
            ro_channel=p_lt['ro_channel'],
            rnd_start=p_lt['rnd_start'],
            rnd_length=p_lt['rnd_length'],
            rnd_0_channel=p_lt['rnd_0_channel'],
            rnd_1_channel=p_lt['rnd_1_channel'],
            psb_tail_start=psb_tail_start,
            psb_tail_len=p_lt['psb_tail_len'],
            pulse_sep=p_lt['pulse_sep'],
            psb_prepulse_start=psb_prepulse_start,
            psb_prepulse_len=p_lt['psb_prepulse_len'])
    if analysis_fp != None:
        tb.set_analysis_data(analysis_fp,
                             'lt3_stats' if lt3 else 'lt4_stats',
                             data=all_lt_stats,
                             attributes=[],
                             permissions='a')
    print 'Done!'
    return all_lt_stats
def process_bs_hist_stats(fps_bs, bs_params, analysis_fp=None, update_previous_analysis_fp=None): p_bs = bs_params if update_previous_analysis_fp != None: f = h5py.File(update_previous_analysis_fp, 'r') hist_stats = f['analysis']['bs_hist_stats'].value total_hist = f['analysis']['bs_total_hist'].value.T total_ent_events_fps = f['analysis']['total_ent_events_fps'].value noofs_fps = len(fps_bs) fps_bs = np.setdiff1d(fps_bs, total_ent_events_fps) noof_old_fps = noofs_fps - len(fps_bs) hist_stats = np.resize(hist_stats, (noofs_fps, be._bs_hist_stats_noof_columns)) f.close() else: hist_stats = np.zeros((len(fps_bs), be._bs_hist_stats_noof_columns), dtype=be._bs_hist_stats_dtype) total_hist = None noof_old_fps = 0 print 'Processing tail {} files'.format(len(fps_bs)) for i, fp in enumerate(fps_bs): print i, hi_0, hi_1, hist_stats[i + noof_old_fps, :] = be.get_bs_hist_stats( fp, st_start_ch0=p_bs['st_start_ch0'], st_start_ch1=p_bs['st_start_ch1'], st_len=p_bs['st_len'], pulse_sep=p_bs['pulse_sep'], st_pulse_start=p_bs['st_pulse_start'], st_pulse_len=p_bs['st_pulse_len'], hist_binsize_ps=p_bs['hist_binsize_ps']) if total_hist == None: total_hist = np.vstack((hi_0, hi_1)) else: total_hist += np.vstack((hi_0, hi_1)) if analysis_fp != None: tb.set_analysis_data(analysis_fp, 'bs_hist_stats', data=hist_stats, attributes=[], permissions='a') tb.set_analysis_data(analysis_fp, 'bs_total_hist', data=total_hist.T, attributes=[], permissions='a') print 'Done!' return total_hist.T, hist_stats
def re_analyze_SSRO_data(pqf, marker_chan, RO_start, RO_length, chan_rnd_0,
                         chan_rnd_1, sync_time_lim, VERBOSE=True):
    """Re-run the SSRO analysis for a PQ data file and store the result.

    Parameters
    ----------
    pqf : h5py.File or str
        Open PQ data file or its filepath.
    marker_chan : int
        Channel on which markers arrive.
    RO_start, RO_length : int
        Readout window definition (passed through to the analysis).
    chan_rnd_0, chan_rnd_1 : int
        Channels carrying the random-bit outcomes.
    sync_time_lim : int
        Sync-time limit used for marker filtering.

    Returns
    -------
    SSRO_events array; also stored on pqf as 'Total_SSRO_events'.

    Raises
    ------
    ValueError
        If pqf is neither an h5py File nor a filepath string.  (BUGFIX: this
        used to be a bare 'raise' with no active exception, which itself
        fails with a confusing error instead of reporting the problem.)
    """
    # Dataset names of the PQ event stream.
    sync_time_name = '/PQ_sync_time'
    sync_num_name = '/PQ_sync_number'
    spec_name = '/PQ_special'
    chan_name = '/PQ_channel'
    time_name = '/PQ_time'
    if isinstance(pqf, h5py._hl.files.File):
        PQ_sync_number = pqf[sync_num_name].value
        PQ_sync_time = pqf[sync_time_name].value
        PQ_time = pqf[time_name].value
        PQ_channel = pqf[chan_name].value
        PQ_special = pqf[spec_name].value
    elif isinstance(pqf, str):
        f = h5py.File(pqf, 'r')
        PQ_sync_number = f[sync_num_name].value
        PQ_sync_time = f[sync_time_name].value
        PQ_time = f[time_name].value
        PQ_channel = f[chan_name].value
        PQ_special = f[spec_name].value
        f.close()
    else:
        raise ValueError(
            "Neither filepath nor file entered in function, please check: "
            "{}".format(pqf))
    _a = get_attributes(pqf)
    # Markers are 'special' events on the marker channel.
    is_mrkr = ((PQ_special == 1) & (PQ_channel == marker_chan))
    num_mrkr = np.sum(is_mrkr)
    unique_sync_num_with_markers = get_unique_sync_with_mrkr_quick(
        pqf, is_mrkr, sync_num_name, sync_time_name, sync_time_lim, num_mrkr)
    # (An unused placeholder initialization of SSRO_events was removed; the
    # array is produced entirely by the call below.)
    SSRO_events = Analysis.Analyze_SSRO_data(
        PQ_sync_number, PQ_special, PQ_sync_time, PQ_time, PQ_channel,
        RO_start, RO_length, marker_chan, unique_sync_num_with_markers,
        chan_rnd_0, chan_rnd_1, sync_time_lim=sync_time_lim)
    # Replace any previously stored result.
    tb.clear_analysis_data(pqf, 'Total_SSRO_events')
    tb.set_analysis_data(pqf, 'Total_SSRO_events', SSRO_events, _a)
    return SSRO_events
def process_afterpulsing_data(fps_bs, afterpulsing_params, analysis_fp=None, update_previous_analysis_fp=None): p_ap = afterpulsing_params if update_previous_analysis_fp != None: f = h5py.File(update_previous_analysis_fp, 'r') all_afterpulsing = f['analysis']['afterpulsing'].value total_ent_events_fps = f['analysis']['total_ent_events_fps'].value noofs_fps = len(fps_bs) fps_bs = np.setdiff1d(fps_bs, total_ent_events_fps) noof_old_fps = noofs_fps - len(fps_bs) all_afterpulsing = np.resize( all_afterpulsing, (noofs_fps, be._bs_afterpulsing_noof_columns)) f.close() else: all_afterpulsing = np.zeros( (len(fps_bs), be._bs_afterpulsing_noof_columns), dtype=be._bs_afterpulsing_dtype) noof_old_fps = 0 print 'Processing afterpulsing {} files'.format(len(fps_bs)) for i, fp in enumerate(fps_bs): print i, all_afterpulsing[i] += be.get_bs_afterpulsing( fp, first_st_start=p_ap['first_st_start'], first_st_len=p_ap['first_st_len'], after_st_start=p_ap['after_pulse_st_start'], after_st_len=p_ap['after_pulse_st_len']) if analysis_fp != None: tb.set_analysis_data(analysis_fp, 'afterpulsing', data=all_afterpulsing, attributes=[], permissions='a') print 'Done!' return all_afterpulsing
def process_tpqi_data(fps_bs, bs_params, analysis_fp=None,
                      update_previous_analysis_fp=None):
    # NOTE(review): duplicate of an earlier process_tpqi_data definition in
    # this module; defined later, this copy takes effect at import time.
    """Collect TPQI coincidence events from the given BS files."""
    p_bs = bs_params
    if update_previous_analysis_fp != None:
        # Resume: reload stored coincidences, skip already-analyzed files.
        # NOTE(review): h5py's Dataset.value is deprecated (removed in
        # h5py 3); use ds[()] when upgrading.
        f = h5py.File(update_previous_analysis_fp, 'r')
        all_coincidences = f['analysis']['tpqi'].value
        total_ent_events_fps = f['analysis']['total_ent_events_fps'].value
        fps_bs = np.setdiff1d(fps_bs, total_ent_events_fps)
        f.close()
    else:
        all_coincidences = np.empty((0, be._tpqi_noof_columns),
                                    dtype=be._tpqi_dtype)
    print 'Processing TPQI {} files'.format(len(fps_bs))
    for i, fp in enumerate(fps_bs):
        print i,
        coincidences = be.get_coincidences(fp,
                                           st_start_ch0=p_bs['st_start_ch0'],
                                           st_start_ch1=p_bs['st_start_ch1'],
                                           st_len=p_bs['st_len'],
                                           pulse_sep=p_bs['pulse_sep'])
        # Stack each file's coincidences onto the running table.
        all_coincidences = np.vstack((all_coincidences, coincidences))
    if analysis_fp != None:
        tb.set_analysis_data(analysis_fp, 'tpqi', data=all_coincidences,
                             attributes=[], permissions='a')
    print 'Done!'
    return all_coincidences
def process_bell_data(fps_bs, lt3_folder, lt4_folder, bs_params, lt_params,
                      analysis_fp=None, update_previous_analysis_fp=None,
                      ignore_unequal_markers=False, process_lt3=True,
                      process_lt4=True, VERBOSE=False):
    # NOTE(review): an identical process_bell_data definition appears later
    # in this module; that later copy shadows this one at import time.
    """Combine BS entanglement events with the matching LT3/LT4 SSRO data.

    ignore_unequal_markers selects how files with mismatched marker counts
    are handled: False skips the file, 'fix_last' discards the last marker
    when counts differ by at most one, 'append_zeros' zero-pads the shorter
    lists.  Results are optionally stored under analysis_fp.
    """
    print 'Found {} filepaths'.format(len(fps_bs))
    if update_previous_analysis_fp != None:
        # Resume from a previous analysis: reload running totals and keep
        # only filepaths not contained in it.
        f = h5py.File(update_previous_analysis_fp, 'r')
        total_ent_events = f['analysis']['total_ent_events'].value
        total_lt3_ssro = f['analysis']['total_lt3_ssro'].value
        total_lt4_ssro = f['analysis']['total_lt4_ssro'].value
        total_ent_events_fps = f['analysis']['total_ent_events_fps'].value
        total_lt3_ssro_fps = f['analysis']['total_lt3_ssro_fps'].value
        total_lt4_ssro_fps = f['analysis']['total_lt4_ssro_fps'].value
        fps_bs = np.setdiff1d(fps_bs, total_ent_events_fps)
        f.close()
        print '{} filepaths left to analyze'.format(len(fps_bs))
    else:
        total_ent_events = np.empty((0, be._bs_noof_columns), dtype=np.uint64)
        total_lt3_ssro = np.empty((0, be._lt_noof_columns), dtype=np.uint64)
        total_lt4_ssro = np.empty((0, be._lt_noof_columns), dtype=np.uint64)
        total_ent_events_fps = []
        total_lt3_ssro_fps = []
        total_lt4_ssro_fps = []
    # Locate the LT3/LT4 files that belong to each BS file.
    fps_lt3, fps_lt4 = get_lt_fps(fps_bs, lt3_folder, lt4_folder)
    p_bs = bs_params
    p_lt = lt_params
    for i, fp_bs, fp_lt3, fp_lt4 in zip(range(len(fps_bs)), fps_bs, fps_lt3,
                                        fps_lt4):
        print i,
        ent_event_list = be.get_entanglement_event_list(
            fp_bs,
            st_start_ch0=p_bs['st_start_ch0'],
            st_start_ch1=p_bs['st_start_ch1'],
            st_len=p_bs['st_len'],
            pulse_sep=p_bs['pulse_sep'],
            st_pulse_start_ch0=p_bs['st_pulse_start_ch0'],
            st_pulse_start_ch1=p_bs['st_pulse_start_ch1'],
            st_pulse_len=p_bs['st_pulse_len'],
            pulse_max_sn_diff=p_bs['pulse_max_sn_diff'],
            ent_marker_channel_bs=p_bs['ent_marker_channel_bs'],
            VERBOSE=VERBOSE)
        # SSRO result lists; zero placeholders when a setup is skipped.
        lt3_ssro_list = be.get_ssro_result_list(
            fp_lt3,
            ro_start=p_lt['ro_start'],
            ro_length=p_lt['ro_length'],
            ro_channel=p_lt['ro_channel'],
            rnd_start=p_lt['rnd_start'],
            rnd_length=p_lt['rnd_length'],
            rnd_channel=p_lt['rnd_channel'],
            rnd_0_channel=p_lt['rnd_0_channel'],
            rnd_1_channel=p_lt['rnd_1_channel'],
            psb_tail_start=p_lt['psb_tail_start_lt3'],
            psb_tail_len=p_lt['psb_tail_len'],
            pulse_sep=p_bs['pulse_sep'],
            ent_marker_channel_lt=p_lt['ent_marker_channel_lt3'],
            ent_marker_lt_timebin_limit=p_lt['ent_marker_lt_timebin_limit'],
            sn_diff_marker_ent_early=p_lt['sn_diff_marker_ent_early'],
            sn_diff_marker_ent_late=p_lt['sn_diff_marker_ent_late'],
            invalid_marker_channel_lt=p_lt['invalid_marker_channel_lt'],
            VERBOSE=VERBOSE) if process_lt3 else np.zeros(
                (len(ent_event_list), be._lt_noof_columns))
        #lt3_ssro_list = be.get_ssro_result_list_adwin(fp_lt3, ssro_result_list=lt3_ssro_list)
        lt4_ssro_list = be.get_ssro_result_list(
            fp_lt4,
            ro_start=p_lt['ro_start'],
            ro_length=p_lt['ro_length'],
            ro_channel=p_lt['ro_channel'],
            rnd_start=p_lt['rnd_start'],
            rnd_length=p_lt['rnd_length'],
            rnd_channel=p_lt['rnd_channel'],
            rnd_0_channel=p_lt['rnd_0_channel'],
            rnd_1_channel=p_lt['rnd_1_channel'],
            psb_tail_start=p_lt['psb_tail_start_lt4'],
            psb_tail_len=p_lt['psb_tail_len'],
            pulse_sep=p_bs['pulse_sep'],
            ent_marker_channel_lt=p_lt['ent_marker_channel_lt4'],
            ent_marker_lt_timebin_limit=p_lt['ent_marker_lt_timebin_limit'],
            sn_diff_marker_ent_early=p_lt['sn_diff_marker_ent_early'],
            sn_diff_marker_ent_late=p_lt['sn_diff_marker_ent_late'],
            invalid_marker_channel_lt=p_lt['invalid_marker_channel_lt'],
            VERBOSE=VERBOSE) if process_lt4 else np.zeros(
                (len(ent_event_list), be._lt_noof_columns))
        #lt4_ssro_list = be.get_ssro_result_list_adwin(fp_lt4, ssro_result_list=lt4_ssro_list)
        if (len(ent_event_list) != len(lt3_ssro_list)) or (
                len(ent_event_list) != len(lt4_ssro_list)):
            print 'WARNING: measurement with filepath {}: Number of markers is unequal'.format(fp_bs)
            print 'BS markers: {}, LT3 markers: {}, LT4 markers: {}'.format(
                len(ent_event_list), len(lt3_ssro_list), len(lt4_ssro_list))
            minlen = min((len(ent_event_list), len(lt3_ssro_list),
                          len(lt4_ssro_list)))
            maxlen = max((len(ent_event_list), len(lt3_ssro_list),
                          len(lt4_ssro_list)))
            if VERBOSE:
                print ent_event_list[:, be._cl_sn], lt3_ssro_list[:, be._cl_sn_ma], lt4_ssro_list[:, be._cl_sn_ma]
            if not ignore_unequal_markers:
                print 'File ignored'
                continue
            elif ignore_unequal_markers == 'fix_last':
                print 'trying to fix unequal markers by discarding last'
                if maxlen - minlen > 1:
                    print 'Fix failed, number of markers too different: {}!'.format(maxlen - minlen)
                    continue
                if minlen == 0:
                    continue
                # Trim all lists to the common length ...
                ent_event_list = ent_event_list[:minlen, :]
                lt3_ssro_list = lt3_ssro_list[:minlen, :]
                lt4_ssro_list = lt4_ssro_list[:minlen, :]
                # ... and verify the remaining last markers agree on sync
                # number (LT3 apparently runs at a 250-vs-251 sync-number
                # ratio — TODO confirm the /250*251 rescaling).
                if abs(int(ent_event_list[-1, be._cl_sn]) - int(lt4_ssro_list[-1, be._cl_sn_ma])) > 250 or \
                   abs(int(ent_event_list[-1, be._cl_sn]) - int(lt3_ssro_list[-1, be._cl_sn_ma] / 250. * 251)) > 250:
                    print 'Fix failed, last marker sync number too different:\n \
BS: {}, LT3 (/250*251): {}. LT4: {}'.format(
                        ent_event_list[-1, be._cl_sn],
                        int(lt3_ssro_list[-1, be._cl_sn_ma] / 250. * 251),
                        lt4_ssro_list[-1, be._cl_sn_ma])
                    continue
                print 'Fix suceeded'
            elif ignore_unequal_markers == 'append_zeros':
                print 'Appending empty events for missing markers'
                ent_event_list = np.vstack((ent_event_list, np.zeros(
                    (maxlen - len(ent_event_list), be._bs_noof_columns),
                    dtype=be._bs_dtype)))
                lt3_ssro_list = np.vstack((lt3_ssro_list, np.zeros(
                    (maxlen - len(lt3_ssro_list), be._lt_noof_columns),
                    dtype=be._lt_dtype)))
                lt4_ssro_list = np.vstack((lt4_ssro_list, np.zeros(
                    (maxlen - len(lt4_ssro_list), be._lt_noof_columns),
                    dtype=be._lt_dtype)))
        # NOTE(review): the threshold is 50 but the message says 100 — one
        # of the two is presumably wrong; confirm the intended limit.
        if len(ent_event_list) > 50:
            print 'Measurement with filepath {} has more than 100 events'.format(fp_bs)
        total_ent_events = np.vstack((total_ent_events, ent_event_list))
        total_lt3_ssro = np.vstack((total_lt3_ssro, lt3_ssro_list))
        total_lt4_ssro = np.vstack((total_lt4_ssro, lt4_ssro_list))
        # Record the source filepath once per stored event row.
        for j in range(len(ent_event_list)):
            total_ent_events_fps = np.append(total_ent_events_fps, fp_bs)
            total_lt3_ssro_fps = np.append(total_lt3_ssro_fps, fp_lt3)
            total_lt4_ssro_fps = np.append(total_lt4_ssro_fps, fp_lt4)
    print 'Done, total_events:', len(total_ent_events)
    if analysis_fp != None:
        tb.set_analysis_data(analysis_fp, 'total_ent_events',
                             data=total_ent_events, attributes=[],
                             permissions='a')
        tb.set_analysis_data(analysis_fp, 'total_lt3_ssro',
                             data=total_lt3_ssro, attributes=[])
        tb.set_analysis_data(analysis_fp, 'total_lt4_ssro',
                             data=total_lt4_ssro, attributes=[])
        tb.set_analysis_data(analysis_fp, 'total_ent_events_fps',
                             data=total_ent_events_fps, attributes=[])
        tb.set_analysis_data(analysis_fp, 'total_lt3_ssro_fps',
                             data=total_lt3_ssro_fps, attributes=[])
        tb.set_analysis_data(analysis_fp, 'total_lt4_ssro_fps',
                             data=total_lt4_ssro_fps, attributes=[])
    # NOTE(review): total_lt3_ssro_fps is returned twice (7-tuple); callers
    # apparently unpack seven values — confirm before changing the arity.
    return total_ent_events, total_lt3_ssro, total_lt4_ssro, total_ent_events_fps, total_lt3_ssro_fps, total_lt3_ssro_fps, total_lt4_ssro_fps
def process_bell_data(fps_bs, lt3_folder, lt4_folder, bs_params, lt_params, analysis_fp=None, update_previous_analysis_fp=None, ignore_unequal_markers=False, process_lt3=True, process_lt4=True, VERBOSE=False): print 'Found {} filepaths'.format(len(fps_bs)) if update_previous_analysis_fp != None: f = h5py.File(update_previous_analysis_fp, 'r') total_ent_events = f['analysis']['total_ent_events'].value total_lt3_ssro = f['analysis']['total_lt3_ssro'].value total_lt4_ssro = f['analysis']['total_lt4_ssro'].value total_ent_events_fps = f['analysis']['total_ent_events_fps'].value total_lt3_ssro_fps = f['analysis']['total_lt3_ssro_fps'].value total_lt4_ssro_fps = f['analysis']['total_lt4_ssro_fps'].value fps_bs = np.setdiff1d(fps_bs, total_ent_events_fps) f.close() print '{} filepaths left to analyze'.format(len(fps_bs)) else: total_ent_events = np.empty((0, be._bs_noof_columns), dtype=np.uint64) total_lt3_ssro = np.empty((0, be._lt_noof_columns), dtype=np.uint64) total_lt4_ssro = np.empty((0, be._lt_noof_columns), dtype=np.uint64) total_ent_events_fps = [] total_lt3_ssro_fps = [] total_lt4_ssro_fps = [] fps_lt3, fps_lt4 = get_lt_fps(fps_bs, lt3_folder, lt4_folder) p_bs = bs_params p_lt = lt_params for i, fp_bs, fp_lt3, fp_lt4 in zip(range(len(fps_bs)), fps_bs, fps_lt3, fps_lt4): print i, ent_event_list = be.get_entanglement_event_list( fp_bs, st_start_ch0=p_bs['st_start_ch0'], st_start_ch1=p_bs['st_start_ch1'], st_len=p_bs['st_len'], pulse_sep=p_bs['pulse_sep'], st_pulse_start_ch0=p_bs['st_pulse_start_ch0'], st_pulse_start_ch1=p_bs['st_pulse_start_ch1'], st_pulse_len=p_bs['st_pulse_len'], pulse_max_sn_diff=p_bs['pulse_max_sn_diff'], ent_marker_channel_bs=p_bs['ent_marker_channel_bs'], VERBOSE=VERBOSE) lt3_ssro_list = be.get_ssro_result_list( fp_lt3, ro_start=p_lt['ro_start'], ro_length=p_lt['ro_length'], ro_channel=p_lt['ro_channel'], rnd_start=p_lt['rnd_start'], rnd_length=p_lt['rnd_length'], rnd_channel=p_lt['rnd_channel'], rnd_0_channel=p_lt['rnd_0_channel'], 
rnd_1_channel=p_lt['rnd_1_channel'], psb_tail_start=p_lt['psb_tail_start_lt3'], psb_tail_len=p_lt['psb_tail_len'], pulse_sep=p_bs['pulse_sep'], ent_marker_channel_lt=p_lt['ent_marker_channel_lt3'], ent_marker_lt_timebin_limit=p_lt['ent_marker_lt_timebin_limit'], sn_diff_marker_ent_early=p_lt['sn_diff_marker_ent_early'], sn_diff_marker_ent_late=p_lt['sn_diff_marker_ent_late'], invalid_marker_channel_lt=p_lt['invalid_marker_channel_lt'], VERBOSE=VERBOSE) if process_lt3 else np.zeros( (len(ent_event_list), be._lt_noof_columns)) #lt3_ssro_list = be.get_ssro_result_list_adwin(fp_lt3, ssro_result_list=lt3_ssro_list) lt4_ssro_list = be.get_ssro_result_list( fp_lt4, ro_start=p_lt['ro_start'], ro_length=p_lt['ro_length'], ro_channel=p_lt['ro_channel'], rnd_start=p_lt['rnd_start'], rnd_length=p_lt['rnd_length'], rnd_channel=p_lt['rnd_channel'], rnd_0_channel=p_lt['rnd_0_channel'], rnd_1_channel=p_lt['rnd_1_channel'], psb_tail_start=p_lt['psb_tail_start_lt4'], psb_tail_len=p_lt['psb_tail_len'], pulse_sep=p_bs['pulse_sep'], ent_marker_channel_lt=p_lt['ent_marker_channel_lt4'], ent_marker_lt_timebin_limit=p_lt['ent_marker_lt_timebin_limit'], sn_diff_marker_ent_early=p_lt['sn_diff_marker_ent_early'], sn_diff_marker_ent_late=p_lt['sn_diff_marker_ent_late'], invalid_marker_channel_lt=p_lt['invalid_marker_channel_lt'], VERBOSE=VERBOSE) if process_lt4 else np.zeros( (len(ent_event_list), be._lt_noof_columns)) #lt4_ssro_list = be.get_ssro_result_list_adwin(fp_lt4, ssro_result_list=lt4_ssro_list) if (len(ent_event_list) != len(lt3_ssro_list)) or ( len(ent_event_list) != len(lt4_ssro_list)): print 'WARNING: measurement with filepath {}: Number of markers is unequal'.format( fp_bs) print 'BS markers: {}, LT3 markers: {}, LT4 markers: {}'.format( len(ent_event_list), len(lt3_ssro_list), len(lt4_ssro_list)) minlen = min( (len(ent_event_list), len(lt3_ssro_list), len(lt4_ssro_list))) maxlen = max( (len(ent_event_list), len(lt3_ssro_list), len(lt4_ssro_list))) if VERBOSE: print 
ent_event_list[:, be. _cl_sn], lt3_ssro_list[:, be. _cl_sn_ma], lt4_ssro_list[:, be . _cl_sn_ma] if not ignore_unequal_markers: print 'File ignored' continue elif ignore_unequal_markers == 'fix_last': print 'trying to fix unequal markers by discarding last' if maxlen - minlen > 1: print 'Fix failed, number of markers too different: {}!'.format( maxlen - minlen) continue if minlen == 0: continue ent_event_list = ent_event_list[:minlen, :] lt3_ssro_list = lt3_ssro_list[:minlen, :] lt4_ssro_list = lt4_ssro_list[:minlen, :] if abs(int(ent_event_list[-1,be._cl_sn])-int(lt4_ssro_list[-1,be._cl_sn_ma]))>250 or \ abs(int(ent_event_list[-1,be._cl_sn])-int(lt3_ssro_list[-1,be._cl_sn_ma]/250.*251))>250: print 'Fix failed, last marker sync number too different:\n \ BS: {}, LT3 (/250*251): {}. LT4: {}'.format( ent_event_list[-1, be._cl_sn], int(lt3_ssro_list[-1, be._cl_sn_ma] / 250. * 251), lt4_ssro_list[-1, be._cl_sn_ma]) continue print 'Fix suceeded' elif ignore_unequal_markers == 'append_zeros': print 'Appending empty events for missing markers' ent_event_list = np.vstack( (ent_event_list, np.zeros( (maxlen - len(ent_event_list), be._bs_noof_columns), dtype=be._bs_dtype))) lt3_ssro_list = np.vstack( (lt3_ssro_list, np.zeros( (maxlen - len(lt3_ssro_list), be._lt_noof_columns), dtype=be._lt_dtype))) lt4_ssro_list = np.vstack( (lt4_ssro_list, np.zeros( (maxlen - len(lt4_ssro_list), be._lt_noof_columns), dtype=be._lt_dtype))) if len(ent_event_list) > 50: print 'Measurement with filepath {} has more than 100 events'.format( fp_bs) total_ent_events = np.vstack((total_ent_events, ent_event_list)) total_lt3_ssro = np.vstack((total_lt3_ssro, lt3_ssro_list)) total_lt4_ssro = np.vstack((total_lt4_ssro, lt4_ssro_list)) for j in range(len(ent_event_list)): total_ent_events_fps = np.append(total_ent_events_fps, fp_bs) total_lt3_ssro_fps = np.append(total_lt3_ssro_fps, fp_lt3) total_lt4_ssro_fps = np.append(total_lt4_ssro_fps, fp_lt4) print 'Done, total_events:', len(total_ent_events) if 
analysis_fp != None: tb.set_analysis_data(analysis_fp, 'total_ent_events', data=total_ent_events, attributes=[], permissions='a') tb.set_analysis_data(analysis_fp, 'total_lt3_ssro', data=total_lt3_ssro, attributes=[]) tb.set_analysis_data(analysis_fp, 'total_lt4_ssro', data=total_lt4_ssro, attributes=[]) tb.set_analysis_data(analysis_fp, 'total_ent_events_fps', data=total_ent_events_fps, attributes=[]) tb.set_analysis_data(analysis_fp, 'total_lt3_ssro_fps', data=total_lt3_ssro_fps, attributes=[]) tb.set_analysis_data(analysis_fp, 'total_lt4_ssro_fps', data=total_lt4_ssro_fps, attributes=[]) return total_ent_events, total_lt3_ssro, total_lt4_ssro, total_ent_events_fps, total_lt3_ssro_fps, total_lt3_ssro_fps, total_lt4_ssro_fps
def get_Bell_events(fp_BS,fp_LT3,fp_LT4, BS_marker_chan, first_win_min, first_win_max, second_win_min, second_win_max, force_eval=False, VERBOSE = True): """ Returns either the entanglement events already saved; the corresponding attributes and save = False, or returns the newly calculated entanglement events; the corresponding attributes (column names) and the save = True. Put in the file path of the BS then of LT3 then of LT4 then the marker channel and then the repitition if it's looped. Also put in the windows to determine if an photon is the first photon to arrive. (first_win_min,first_win_max,second_win_min, second_win_max) """ folder, name = os.path.split(tb.get_msmt_header(fp_BS)) if VERBOSE: print name if tb.has_analysis_data(fp_LT3, 'Total_SSRO_events'): Total_SSRO_events_LT3, _att_LT3 = tb.get_analysis_data(fp_LT3, 'Total_SSRO_events') if tb.has_analysis_data(fp_LT4, 'Total_SSRO_events'): Total_SSRO_events_LT4, _att_LT4 = tb.get_analysis_data(fp_LT4, 'Total_SSRO_events') if tb.has_analysis_data(fp_BS, 'Entanglement_events') and not force_eval: entanglement_events, _a = tb.get_analysis_data(fp_BS, 'Entanglement_events') if force_eval or not tb.has_analysis_data(fp_BS, 'Entanglement_events'): # Opens beamsplitter data f = h5py.File(fp_BS, 'r') sync_times = f['/PQ_sync_time-1'].value sync_numbers = f['/PQ_sync_number-1'].value channel = f['/PQ_channel-1'].value abs_times = f['/PQ_time-1'].value f.close() sync_num_with_markers = sync_numbers[pq_tools.filter_marker(fp_BS,BS_marker_chan, VERBOSE = False)] unique_sync_num_with_markers = np.unique(sync_num_with_markers) if (len(Total_SSRO_events_LT3[:,2]) == len(unique_sync_num_with_markers)) & \ (len(Total_SSRO_events_LT4[:,2]) == len(unique_sync_num_with_markers)): print print print "The number of markers matches and is:", len(unique_sync_num_with_markers) print "=======================================================================" print elif '215201_Bell_BS_full_BellLFBT_day2_Run8' in fp_BS: 
sync_times_with_marker = sync_times[pq_tools.filter_marker(fp_BS,BS_marker_chan, VERBOSE = VERBOSE)] last_sync_time = sync_times_with_marker[len(sync_times_with_marker)-1] print print print "Filepath:", fp_BS print "The number of markers does not match!!!!" print "The number of BS markers is:", len(unique_sync_num_with_markers) print "We have lookend into this in the data and there is a corrupt marker in the BS data which has a sync time of", last_sync_time print "The number of LT3 markers is:", len(Total_SSRO_events_LT3[:,2]) print "The number of LT4 markers is:", len(Total_SSRO_events_LT4[:,2]) print "=======================================================================" print print else: print print print "Filepath:", fp_BS print "The number of markers does not match!!!!" print "The number of BS markers is:", len(unique_sync_num_with_markers) print "The number of LT3 markers is:", len(Total_SSRO_events_LT3[:,2]) print "The number of LT4 markers is:", len(Total_SSRO_events_LT4[:,2]) print "=======================================================================" print print #raise # Gets filters for photons with markers in the first and second window # from the Filter file is_photon_1st_window_with_markers, is_photon_2nd_window_with_markers =\ pq_tools.get_photons_with_markers(fp_BS, BS_marker_chan, first_win_min, first_win_max, second_win_min, second_win_max, VERBOSE = VERBOSE) # Gets filters for photons with markers in the first and second window # from the Filter file is_photon_1st_window_with_markers, is_photon_2nd_window_with_markers =\ pq_tools.get_photons_with_markers(fp_BS, BS_marker_chan, first_win_min, first_win_max, second_win_min, second_win_max) # Retrieves sync numbers and sync times for photons both in the first # and 2nd window Sync_num_1st_window_with_markers = sync_numbers[is_photon_1st_window_with_markers] channel_1st_window_with_markers = channel[is_photon_1st_window_with_markers] Sync_times_1st_window_with_markers = 
sync_times[is_photon_1st_window_with_markers] Sync_num_2nd_window_with_markers = sync_numbers[is_photon_2nd_window_with_markers] channel_2nd_window_with_markers = channel[is_photon_2nd_window_with_markers] Sync_times_2nd_window_with_markers = sync_times[is_photon_2nd_window_with_markers] # Defines a filter for all events with markers is_all_markers = is_photon_1st_window_with_markers | is_photon_2nd_window_with_markers # Gets the absolute times for all events with makers PLU_mrkr_abs_times = abs_times[is_all_markers] #Initializes the final array of entanglement events entanglement_events = np.empty((0,7), np.uint64) columns = "Sync_Number, Sync_Time_photon_1, Sync_Time_photon_2, Channel_photon_1,\ Channel_photon_2, psiminus, abs_time" _a = {'Columns': columns} # Get all real entanglement events, loops over sync numbers for i,s in enumerate(unique_sync_num_with_markers): # The attempt is defined as the sync number modulo 250 #(250 = the repitition rate) # Return filters for specific sync number s is_ph_1st_win_sync_num_s = Sync_num_1st_window_with_markers == s is_ph_2nd_win_sync_num_s = Sync_num_2nd_window_with_markers == s # Test if there is one photon in both windows if len(Sync_num_1st_window_with_markers[is_ph_1st_win_sync_num_s]) == 1 \ and len(Sync_num_2nd_window_with_markers[is_ph_2nd_win_sync_num_s]) == 1: # Saves sync times an channels of both photons stimes = np.array([ Sync_times_1st_window_with_markers[is_ph_1st_win_sync_num_s],\ Sync_times_2nd_window_with_markers[is_ph_2nd_win_sync_num_s]]).reshape(-1) channel_1 = channel_1st_window_with_markers[is_ph_1st_win_sync_num_s] channel_2 = channel_2nd_window_with_markers[is_ph_2nd_win_sync_num_s] chans = np.array([channel_1,channel_2]) # Determines if event is psiplus or psiminus if channel_1 == channel_2: psiminus = 0 else: psiminus = 1 # Test if there are two photons in the first window elif len(Sync_num_1st_window_with_markers[is_ph_1st_win_sync_num_s]) == 2 and \ 
len(Sync_num_2nd_window_with_markers[is_ph_2nd_win_sync_num_s]) == 0: # Saves sync times an channels of both photons stimes = Sync_times_1st_window_with_markers[is_ph_1st_win_sync_num_s] chans = Channel_1st_window_with_markers[is_ph_1st_win_sync_num_s] # Set psiminus to two meaning that there is no entanglement since both photons # are in first window psiminus = 2 # Test if there are two photons in the second window elif len(Sync_num_1st_window_with_markers[is_ph_1st_win_sync_num_s]) == 0 and \ len(Sync_num_2nd_window_with_markers[is_ph_2nd_win_sync_num_s]) == 2: # Saves sync times an channels of both photons stimes = Sync_times_2nd_window_with_markers[is_ph_2nd_win_sync_num_s] chans = Channel_2nd_window_with_markers[is_ph_2nd_win_sync_num_s] # Set psiminus to two meaning that there is no entanglement since both photons # are in second window psiminus = 3 # Disregards events with more than two photons else: continue """ Returns all entanglement events. Colums are: Sync Nymber BS | Sync Time Photon 1 BS | Sync Time photon 2 BS | Photon 1 Channel BS | Photon 2 Channel BS | psiminus | absolute time BS """ _event = np.array([s, stimes[0], stimes[1], chans[0], chans[1], psiminus, PLU_mrkr_abs_times[i]], dtype = np.uint64) entanglement_events = np.vstack((entanglement_events, _event)) if tb.has_analysis_data(fp_BS,'Entanglement_events'): tb.clear_analysis_data(fp_BS) tb.set_analysis_data(fp_BS, 'Entanglement_events', entanglement_events, _a) else: tb.set_analysis_data(fp_BS, 'Entanglement_events', entanglement_events, _a) if VERBOSE: print print 'Found {} valid entanglement events.'.format(int(len(entanglement_events))) print '====================================' print if '215201_Bell_BS_full_BellLFBT_day2_Run8' in fp_BS: print print 'Found {} valid entanglement events.'.format(int(len(entanglement_events))) print 'This does not correspond with the number of markers because there is one corrupt marker' print '====================================' print BS_LT3_data = 
np.hstack((entanglement_events, Total_SSRO_events_LT3)) All_combined_data = np.hstack((BS_LT3_data,Total_SSRO_events_LT4)) Combined_attributes = _a['Columns'] + ', ' + _att_LT3['Columns'] + ', ' + _att_LT4['Columns'] _combined_attributes = {'Columns': Combined_attributes} return All_combined_data, _combined_attributes