def plotting_automatic_beta_ratio_abs(file_input,file_output,plane,freeNo): name_output = os.path.join(file_output,"beta_beating_ratio_abs" + str(plane.lower()) + ".pdf") gs = gridspec.GridSpec(1, 1,height_ratios=[1]) ax1 = subplot(gs[0]) beta = "BET" + str(plane.upper()) beta_error_phase = "ERRBET" + str(plane.upper()) beta_error_amplitude = "BET" + str(plane.upper()) + "STD" beta_model = "BET" + str(plane.upper()) + "MDL" if plane == "x": ax1.set_ylabel(r'$\beta_A,x / \beta_{\Phi,x}$ [-]') elif plane == "y": ax1.set_ylabel(r'$\beta_A,y / \beta_{\Phi,y}$ [-]') ax1.set_xlabel(r'Longitudinal location [m]') #for i in range(len(results)): freeVer = ".out" if freeNo > 1: freeVer = "_free{}.out".format(freeNo) elif freeNo == 1: freeVer = "_free.out" print("freeNo = ",freeNo) print("freeVer = ",freeVer) beta_file_phase = tfs_pandas.read_tfs(os.path.join(file_input,"getbeta" + str(plane.lower()) + freeVer)) beta_file_amplitude = tfs_pandas.read_tfs(os.path.join(file_input,"getampbeta" + str(plane.lower()) + freeVer)) beta_error_phase_values = getattr(beta_file_phase,beta_error_phase) beta_error_amplitude_values = getattr(beta_file_amplitude,beta_error_amplitude) b_pha = getattr(beta_file_phase,beta) b_pha_sig = beta_error_phase_values b_amp = getattr(beta_file_amplitude,beta) b_amp_sig =beta_error_amplitude_values beta_error_total = ( (b_amp_sig/b_pha)**2 + ( (b_amp*b_pha_sig)/(b_pha**2) )**2 )**0.5 betaratio = getattr(beta_file_amplitude,beta) / getattr(beta_file_phase,beta) names = getattr(beta_file_amplitude,"NAME") print " BPM_IDX RATIOAMPPHASE ERROR" for i in range(len(betaratio)): print " %03d %s %f %f" % (i, names[i], betaratio[i],beta_error_total[i]) #print "BETA RATIO ABS ######" #print getattr(beta_file_amplitude,beta) / getattr(beta_file_phase,beta) #print beta_error_total #print b_pha_sig #print b_amp_sig ax1.errorbar(beta_file_phase.S, getattr(beta_file_amplitude,beta) / getattr(beta_file_phase,beta) , yerr = beta_error_total , fmt='o', 
color=COLOR_LIST_LIGHT[0], markersize=4, markeredgecolor=COLOR_LIST_DARK[0]) handles, labels = ax1.get_legend_handles_labels() ax1.legend(handles, labels, loc=4, frameon=True, numpoints = 1, ncol=1) tight_layout() savefig(name_output)
def _get_bpm_names(orbit_path_left, orbit_path_right):
    """Read the BPM name column of each beam's orbit file on both sides.

    Returns a dict keyed by (beam, side) holding the "NAME" column of the
    corresponding LHCB{1,2}.orbit1.tfs file.
    """
    side_paths = {LEFT: orbit_path_left, RIGHT: orbit_path_right}
    beam_files = {BEAM1: "LHCB1.orbit1.tfs", BEAM2: "LHCB2.orbit1.tfs"}
    names = {}
    for beam in (BEAM1, BEAM2):
        for side in (LEFT, RIGHT):
            full_path = os.path.join(side_paths[side], beam_files[beam])
            names[(beam, side)] = tfs_pandas.read_tfs(full_path)["NAME"]
    return names
def add_mqts_to_errtab(errtab_path, unmatched_path, matched_path):
    """Write the per-MQT K1L change (matched - unmatched) into the error table.

    The error table at *errtab_path* is read, its MQT rows get the rounded
    K1L difference between the matched and unmatched twiss tables, and the
    table is written back to the same path.
    """
    errtab = tfs_pandas.read_tfs(errtab_path).set_index("NAME", drop=False)
    unmatched = tfs_pandas.read_tfs(unmatched_path).set_index("NAME")
    matched = tfs_pandas.read_tfs(matched_path).set_index("NAME")
    mqt_names = [elem for elem in unmatched.index.values if "MQT." in elem]
    deltas = np.array(
        matched.loc[mqt_names, "K1L"].values
        - unmatched.loc[mqt_names, "K1L"].values,
        dtype=float,
    ).round(decimals=10)
    errtab.loc[mqt_names, "K1L"] = deltas
    # TODO: when creating validation set: replace original errtab file with
    # new one - write the file!
    tfs_pandas.write_tfs(errtab_path, errtab, save_index=True)
def _get_beta_beat_file(self, label, plane):
    """Load the segment-by-segment beta-beating TFS file for *label*/*plane*."""
    sbs_name = "sbsbetabeat" + plane.lower() + "_" + label + ".out"
    sbs_path = os.path.join(
        self._matcher_model.get_output_path(), "sbs", sbs_name)
    return tfs_pandas.read_tfs(sbs_path)
def clean_tunes(files, limit=DEF_LIMIT):
    """Drop out-of-limit tune entries from each file and rewrite it in place.

    Args:
        files: iterable of TFS file paths.
        limit: cut passed to _get_mask to flag bad tune rows.
    """
    # Renamed the loop variable from `file`: it shadowed the builtin.
    for file_path in files:
        file_df = tfs_pandas.read_tfs(file_path)
        mask = _get_mask(file_df, limit)
        file_df = file_df.loc[mask, :]
        _recompute_tune_stats(file_df)
        tfs_pandas.write_tfs(file_df, file_df.headers, file_path)
def get_variables(cls, frm=None, to=None, classes=None):
    """Return corrector variables in s-range [frm, to], filtered by class.

    Variables are ordered by longitudinal position; only those belonging
    to one of the requested corrector classes (default: all) are kept.
    """
    correctors_dir = os.path.join(LHC_DIR, "2012", "correctors")
    beam_dir = os.path.join(
        correctors_dir, "correctors_b" + str(cls.get_beam()))
    all_corrs = _merge_jsons(
        os.path.join(beam_dir, "beta_correctors.json"),
        os.path.join(beam_dir, "coupling_correctors.json"),
        cls._get_triplet_correctors_file(),
    )
    my_classes = all_corrs.keys() if classes is None else classes
    vars_by_class = set(_flatten_list(
        [all_corrs[corr_cls]
         for corr_cls in my_classes if corr_cls in all_corrs]))
    elems = tfs_pandas.read_tfs(cls._get_corrector_elems())
    elems_matrix = elems.sort_values("S").set_index("S").loc[frm:to, :]
    split_vars = [raw_vars.split(",")
                  for raw_vars in elems_matrix.loc[:, "VARS"]]
    vars_by_position = _remove_dups_keep_order(_flatten_list(split_vars))
    return _list_intersect_keep_order(vars_by_position, vars_by_class)
def _collect_orbit_data(orbit_path_left, orbit_path_right,
                        kleft_path, kright_path):
    """Collect K values and mean-subtracted orbits per (beam, side, plane).

    Returns:
        (k, bpm): dicts keyed by (beam, side, plane); k holds lists of
        signed K values, bpm holds lists of centered orbit columns.
    """
    def _read_k_file(path):
        # K files are indexed by their integer TIME stamp.
        k_file = tfs_pandas.read_tfs(path)
        k_file["TIME"] = k_file["TIME"].astype(long)
        k_file.set_index("TIME", inplace=True)
        return k_file

    orbit_paths = {LEFT: orbit_path_left, RIGHT: orbit_path_right}
    k_files = {LEFT: _read_k_file(kleft_path),
               RIGHT: _read_k_file(kright_path)}
    k = {}
    _apply_to_beam_side_plane(
        lambda beam, side, plane: k.__setitem__((beam, side, plane), [])
    )
    bpm = {}
    _apply_to_beam_side_plane(
        lambda beam, side, plane: bpm.__setitem__((beam, side, plane), [])
    )
    for side in SIDES:
        orbit_path = orbit_paths[side]
        k_file = k_files[side]
        for filename in os.listdir(orbit_path):
            if filename.startswith('LHCB1'):
                beam = BEAM1
            elif filename.startswith('LHCB2'):
                beam = BEAM2
            else:
                # BUGFIX: an unexpected file previously reused `beam` from
                # the preceding iteration (or raised NameError on the first
                # one) -- skip anything that is not an LHCB1/LHCB2 file.
                continue
            orbit_data = tfs_pandas.read_tfs(
                os.path.join(orbit_path, filename))
            timestamp = _date_str_to_timestamp(
                orbit_data.headers["OrbitDate"])
            try:
                k[(beam, side, HOR)].append(
                    _get_sign(beam, HOR) * k_file.loc[timestamp, "K"])
                k[(beam, side, VER)].append(
                    _get_sign(beam, VER) * k_file.loc[timestamp, "K"])
                orbit_x = orbit_data.loc[:, "X"]
                bpm[(beam, side, HOR)].append(orbit_x - np.mean(orbit_x))
                orbit_y = orbit_data.loc[:, "Y"]
                bpm[(beam, side, VER)].append(orbit_y - np.mean(orbit_y))
            except KeyError:
                # Timestamp missing from the K file: skip this orbit file.
                continue
    return k, bpm
def plot(self):
    """Redraw the phase-match figure: horizontal on top, vertical below."""
    self._figure.clear()
    self._axes_to_data = {}
    sbs_dir = os.path.join(self._matcher_model.get_output_path(), "sbs")
    label = str(self._matcher_model.get_label())

    axes_x = self._figure.add_subplot(2, 1, 1)
    axes_x.set_title(self._matcher_model.get_name())
    file_horizontal = tfs_pandas.read_tfs(
        os.path.join(sbs_dir, "sbsphasext_" + label + ".out"))
    self._axes_to_data[axes_x] = file_horizontal
    self._plot_match(axes_x, file_horizontal, "X")

    file_vertical = tfs_pandas.read_tfs(
        os.path.join(sbs_dir, "sbsphaseyt_" + label + ".out"))
    axes_y = self._figure.add_subplot(2, 1, 2)
    self._axes_to_data[axes_y] = file_vertical
    self._plot_match(axes_y, file_vertical, "Y")

    self._figure.tight_layout()
    self._figure.patch.set_visible(False)
    self._figure.canvas.draw()
def _collect_beam_errors(errtab_path, tw_before_match, tw_after_match):
    """Return (other_errors, mqt_errors) for one beam's error table.

    other_errors: K1L of every magnet that is neither an MQT nor a triplet
        (MQX) quadrupole.
    mqt_errors: the distinct non-zero matched-minus-unmatched MQT K1L
        changes (one value per MQT knob; the error table stores 0 for MQTs,
        the real strength lives in the twiss difference).
    """
    error_tfs = tfs_pandas.read_tfs(errtab_path).set_index("NAME")
    unmatched = tfs_pandas.read_tfs(tw_before_match).set_index("NAME")
    matched = tfs_pandas.read_tfs(tw_after_match).set_index("NAME")
    mqt_names = [name for name in unmatched.index.values if "MQT." in name]
    mqt_changes = np.array(
        matched.loc[mqt_names, "K1L"].values
        - unmatched.loc[mqt_names, "K1L"].values,
        dtype=float).round(decimals=10)
    mqt_errors = np.unique(mqt_changes).round(decimals=10)
    mqt_errors = [k for k in mqt_errors if k != 0]
    other_names = [
        name for name in error_tfs.index.values
        if ("MQT." not in name and "MQX" not in name)
    ]
    other_errors = error_tfs.loc[other_names, "K1L"].values
    return other_errors, mqt_errors


def get_errors_from_file(common_errors_path, b1_path, b2_path,
                         b1_tw_before_match, b1_tw_after_match,
                         b2_tw_before_match, b2_tw_after_match):
    """Assemble the full error vector: triplets, beam 1 quads, beam 2 quads.

    All errors come in the form: common triplet errors once, then per beam
    the non-MQT/non-MQX quad errors followed by the MQT knob errors --
    matching the distribution of BPMs in the input data.
    """
    triplet_errors = tfs_pandas.read_tfs(
        common_errors_path).set_index("NAME").K1L.values
    # The duplicated beam-1/beam-2 extraction now lives in one helper.
    other_b1, mqt_b1 = _collect_beam_errors(
        b1_path, b1_tw_before_match, b1_tw_after_match)
    other_b2, mqt_b2 = _collect_beam_errors(
        b2_path, b2_tw_before_match, b2_tw_after_match)
    all_errors = []
    all_errors.extend(triplet_errors)
    all_errors.extend(other_b1)
    all_errors.extend(mqt_b1)
    all_errors.extend(other_b2)
    all_errors.extend(mqt_b2)
    return all_errors
def compute_offset(model1_path, model2_path, orbit_path_left,
                   orbit_path_right, kleft_path, kright_path, ip):
    """Compute BPM offsets around *ip* for every (beam, side, plane)."""
    ks, orbits = _collect_orbit_data(orbit_path_left, orbit_path_right,
                                     kleft_path, kright_path)
    models = {}
    for beam, model_path in ((BEAM1, model1_path), (BEAM2, model2_path)):
        model = tfs_pandas.read_tfs(model_path)
        model.set_index("NAME", inplace=True)
        models[beam] = model
    bpm_names = _get_bpm_names(orbit_path_left, orbit_path_right)
    return _apply_to_beam_side_plane(
        lambda beam, side, plane: _compute_and_clean(
            ip, beam, side, plane, models, bpm_names, ks, orbits))
def get_input_for_beam(tw_perturbed_path, mdl_path, beam):
    """Return (IP beta deviations, delta MUX, delta MUY, delta NDX) vs model.

    The beta deviations are listed per IP BPM, interleaved X then Y.
    """
    ip_bpms_by_beam = {
        1: ["BPMSW.1L1.B1", "BPMSW.1R1.B1", "BPMSW.1L2.B1", "BPMSW.1R2.B1",
            "BPMSW.1L5.B1", "BPMSW.1R5.B1", "BPMSW.1L8.B1", "BPMSW.1R8.B1"],
        2: ["BPMSW.1L1.B2", "BPMSW.1R1.B2", "BPMSW.1L2.B2", "BPMSW.1R2.B2",
            "BPMSW.1L5.B2", "BPMSW.1R5.B2", "BPMSW.1L8.B2", "BPMSW.1R8.B2"],
    }
    tw_perturbed = tfs_pandas.read_tfs(tw_perturbed_path).set_index("NAME")
    mdl = tfs_pandas.read_tfs(mdl_path).set_index("NAME")
    perturbed = tw_perturbed.loc[mdl.index, :]
    ip_bpms = ip_bpms_by_beam[1 if beam == 1 else 2]
    beta_star = []
    for bpm in ip_bpms:
        for column in ("BETX", "BETY"):
            beta_star.append(
                perturbed.loc[bpm, column] - mdl.loc[bpm, column])
    delta_mux = perturbed.MUX - mdl.MUX
    delta_muy = perturbed.MUY - mdl.MUY
    delta_dx = perturbed.NDX - mdl.NDX
    return beta_star, delta_mux, delta_muy, delta_dx
def _loadtwiss_beta(varandpath): (var, path) = varandpath x = 0 try: x = tfs_pandas.read_tfs(path + "/twiss." + var) x = x.set_index('NAME').drop_duplicates() x['Q1']=x.headers['Q1'] x['Q2']=x.headers['Q2'] os.remove(path + "/twiss." + var) print x.headers['Q2'] except IOError as e: print e return [] return var, x
def get_segment(cls, label, first_elem, last_elem, optics_file):
    """Build and verify a segment instance spanning first_elem..last_elem."""
    segment_cls = type(cls.__name__ + "Segment", (_LhcSegmentMixin, cls), {})
    segment = segment_cls()
    bpms_file_name = ("beam1bpms.tfs" if cls.get_beam() == 1
                      else "beam2bpms.tfs")
    bpms_file = _get_file_for_year(cls.YEAR, bpms_file_name)
    bpms = tfs_pandas.read_tfs(bpms_file).set_index("NAME")
    segment.label = label
    segment.start = Element(first_elem, bpms.loc[first_elem, "S"])
    segment.end = Element(last_elem, bpms.loc[last_elem, "S"])
    segment.optics_file = optics_file
    segment.xing = False
    segment.verify_object()
    return segment
def plot(self):
    """Redraw the coupling figure: f1001 on top, f1010 below."""
    self._figure.clear()
    self._axes_to_data = {}
    label = self._matcher_model.get_label()
    axes_f1001 = self._figure.add_subplot(2, 1, 1)
    axes_f1010 = self._figure.add_subplot(2, 1, 2)
    couple_path = os.path.join(self._matcher_model.get_output_path(),
                               "sbs", "sbscouple_" + label + ".out")
    file_coup = tfs_pandas.read_tfs(couple_path)
    # Both axes share the single coupling file.
    self._axes_to_data[axes_f1001] = file_coup
    self._axes_to_data[axes_f1010] = file_coup
    self._plot_match(axes_f1001, file_coup, "f1001")
    self._plot_match(axes_f1010, file_coup, "f1010")
    self._figure.tight_layout()
    self._figure.patch.set_visible(False)
    self._figure.canvas.draw()
def calc_dp_over_p(main_input, bpm_names, bpm_data):
    """Estimate dp/p from arc-BPM closed orbits weighted by model dispersion.

    Raises:
        ValueError: when the dispersion-squared sum is zero (no arc BPMs).
    """
    model_twiss = tfs_pandas.read_tfs(main_input.model)
    model_twiss.set_index("NAME", inplace=True)
    # Accelerator class is derived from the sequence name, beam suffix
    # stripped.
    sequence = model_twiss.headers["SEQUENCE"].lower().replace(
        "b1", "").replace("b2", "")
    accel_cls = manager.get_accel_class(sequence)
    arc_bpms_mask = accel_cls.get_arc_bpms_mask(bpm_names)
    arc_bpm_names = bpm_names[arc_bpms_mask]
    dispersions = model_twiss.loc[arc_bpm_names, "DX"] * 1e3  # We need it in mm
    closed_orbits = np.mean(bpm_data[arc_bpms_mask], axis=1)
    denom = np.sum(dispersions ** 2)
    if denom == 0.:
        raise ValueError("Cannot compute dpp probably no arc BPMs.")
    return np.sum(dispersions * closed_orbits) / denom
def main(elems_file, sequences, output):
    """Build an S/L/NAME/VARS table from the element file and write it out.

    Args:
        elems_file: TFS file with S, NAME and L columns.
        sequences: comma-separated sequence names.
        output: path of the TFS table to write.
    """
    sequence_list = sequences.split(",")
    elems_data = tfs_pandas.read_tfs(elems_file)
    names = elems_data.NAME
    elem_to_vars = get_elem_to_vars(names, sequence_list)
    variables = ["".join(elem_to_vars[name]) for name in names]
    all_data = OrderedDict((
        ("S", elems_data.S),
        ("L", elems_data.L),
        ("NAME", names),
        ("VARS", variables),
    ))
    data_frame = pd.DataFrame(
        data=all_data,
        columns=all_data.keys(),
    ).sort_values(by="S")
    tfs_pandas.write_tfs(data_frame, {}, output)
#plt.title(r"$\beta$ Beating") plt.xlabel("$s \; [m]$") ax1.set_ylabel(r'$ \Delta \beta/ \beta \; [\%]$') #plt.axes().set_aspect(aspect=150) #ax1.set_aspect(1.50) #ax2.set_aspect(1.50) plt.savefig(args.printplt) if args.histogram: g, ax1 = plt.subplots(figsize=(width*.65, height*.65)) getbetax = tfs_pandas.read_tfs(getbetaxpath) mybins = np.arange(0, 10, .25) ax1.hist(getbetax["ERRBET{:s}".format(plane)] / getbetax["BET{:s}MDL".format(plane)] * 100.0, mybins, color='#A0A0FF', edgecolor='#0000FF') plt.xlabel(r"size of errorbar$ \; [\%]$") ax1.set_ylabel(r'count') plt.tight_layout() plt.savefig(args.printplt.replace(".pdf", "_hist.pdf")) g.show() raw_input() #plotScatterX(outputich, output3bpm, twisserr)
# ============================================================================= # ======== function definitions =============================================== # ============================================================================= resultspath = "/user/slops/data/LHC_DATA/OP_DATA/Betabeat/" list_dir = os.listdir(resultspath) logfile = open( "/afs/cern.ch/work/a/awegsche/public/CCC_postprocessing/results", "w") for d in list_dir: if d.startswith("2017"): print d walked = os.walk(resultspath + d) for tripl in walked: if "getbetax_free.out" in tripl[2]: getbetax = tfs.read_tfs(tripl[0] + "/getbetax_free.out") logfile.write(tripl[0] + "; " + getbetax.headers["Command"] + "; nothng\n") print "\33[38;2;255;0;0m", tripl[0], "\33[0m" logfile.close() #print os.walk(resultspath)
def _read_tfs(path, file_name):
    """Read the TFS file *file_name* located under directory *path*."""
    full_path = os.path.join(path, file_name)
    return tfs.read_tfs(full_path)
def global_correction(
        accel_cls,
        meas_dir_path,
        model_twiss_path,
        fullresponse_path,
        optics_file=_DEFAULTS["optics_file"],
        output_path=_DEFAULTS["output_path"],
        singular_value_cut=_DEFAULTS["singular_value_cut"],
        modelcut=_DEFAULTS["modelcut"],
        errorcut=_DEFAULTS["errorcut"],
        weights_on_quantities=_DEFAULTS["weights_on_quantities"],
        use_errorbars=_DEFAULTS["use_errorbars"],
        variables=_DEFAULTS["variables"],
        beta_file_name=_DEFAULTS["beta_file_name"],
        virt_flag=_DEFAULTS["virt_flag"],
        num_reiteration=_DEFAULTS["num_reiteration"],
):
    """Run an iterative global optics correction against a measurement.

    Filters the measurement against the nominal model, computes corrector
    deltas from the full response matrix (SVD with *singular_value_cut*),
    then re-runs MADX *num_reiteration* times, accumulating deltas and
    finally writing the resulting knob.

    NOTE(review): defaults for optics_file/output_path are resolved from
    the model path / measurement dir when passed as None.
    """
    if optics_file is None:
        # Default to the modifiers file next to the model twiss.
        optics_file = os.path.join(
            os.path.dirname(model_twiss_path), "modifiers.madx")
    if output_path is None:
        output_path = meas_dir_path
    # Parse the string-encoded weights and cut specifications.
    w_dict = _get_weights_dictionary(weights_on_quantities)
    m_dict = _get_cut_str_to_dict(modelcut)
    e_dict = _get_cut_str_to_dict(errorcut)
    nominal_model = tfs.read_tfs(model_twiss_path)
    full_response = _load_fullresponse(fullresponse_path)
    varslist = _get_varlist(accel_cls, variables, virt_flag)
    # Load the measurement, filter it against model and error cuts, and
    # restrict the response matrix to the surviving quantities.
    keys, meas_dict = _scan_meas_dir(
        _ALL_KEYS, w_dict, meas_dir_path, beta_file_name)
    keys, meas_dict = _filter_measurement(
        meas_dict, nominal_model, keys, w_dict, use_errorbars,
        e_dict, m_dict)
    full_response = _filter_response_columns(full_response, meas_dict, keys)
    meas_dict = _append_model_to_measurement(nominal_model, meas_dict, keys)
    _print_rms(meas_dict, keys)
    _dump(os.path.join(output_path, "measurement_dict.bin"), meas_dict)
    # First correction pass.
    deltas = _calculate_deltas(
        full_response, meas_dict, keys, varslist, cut=singular_value_cut)
    writeparams(deltas, varslist, output_path)
    for i in range(num_reiteration):
        # Re-run MADX with the corrections applied and compute an
        # incremental delta against the corrected model.
        LOGGER.debug("Running MADX:" + str(i + 1))
        template_file_path = os.path.join(
            CURRENT_DIR, "job.twiss_python.madx")
        madx_script = _create_madx_script(
            accel_cls, nominal_model, optics_file, template_file_path,
            output_path)
        _callMadx(madx_script)
        # TODO
        new_model_path = os.path.join(
            output_path, "twiss_" + str(i) + ".dat")
        shutil.copy2(
            os.path.join(output_path, "twiss_corr.dat"), new_model_path)
        new_model = tfs.read_tfs(new_model_path)
        meas_dict = _append_model_to_measurement(new_model, meas_dict, keys)
        _print_rms(meas_dict, keys)
        ideltas = _calculate_deltas(
            full_response, meas_dict, keys, varslist,
            cut=singular_value_cut,
        )
        writeparams(ideltas, varslist, output_path, append=True)
        # Accumulate corrections across iterations.
        deltas = deltas + ideltas
        LOGGER.debug("Cumulative deltas:" + str(np.sum(np.abs(deltas))))
    write_knob(deltas, varslist)