def get_residuals_interp_func(mapfile):
    """Load a fit-map pickle and build two residual-field interpolators.

    Parameters
    ----------
    mapfile : str
        Path to a pickled DataFrame containing measured (Bx, By, Bz) and
        fitted (Bx_fit, By_fit, Bz_fit) field columns.

    Returns
    -------
    tuple
        (xyz_res_func, mag_res_func): interpolation functions over the
        component residuals and over the magnitude residual, respectively.
    """
    frame = pd.read_pickle(mapfile)
    # magnitudes of the measured and fitted fields
    frame.eval('B = (Bx**2+By**2+Bz**2)**(1/2)', inplace=True)
    frame.eval('B_fit = (Bx_fit**2+By_fit**2+Bz_fit**2)**(1/2)', inplace=True)
    # residuals: measured minus fit, for |B| and each component
    for expr in ('B_res = B - B_fit',
                 'Bx_res = Bx - Bx_fit',
                 'By_res = By - By_fit',
                 'Bz_res = Bz - Bz_fit'):
        frame.eval(expr, inplace=True)
    # interpolators over the residual columns
    xyz_res_func = get_df_interp_func(df=frame,
                                      Blabels=['Bx_res', 'By_res', 'Bz_res'])
    # NOTE(review): label list mixes B_res with By_res/Bz_res — looks like a
    # copy-paste of the xyz labels; confirm this is intentional.
    mag_res_func = get_df_interp_func(df=frame,
                                      Blabels=['B_res', 'By_res', 'Bz_res'])
    return xyz_res_func, mag_res_func
# ---- Example #2 (separate snippet below) ----
def map_run(ic_df, B_file=fBnom, name='Mau13'):
    """Run a trajectory simulation for every particle in ``ic_df`` in parallel.

    Parameters
    ----------
    ic_df : pandas.DataFrame
        Initial-conditions dataframe; one row per particle.
    B_file : str
        Path to the B-field map pickle (default: module-level ``fBnom``).
    name : str
        Suffix for the result columns added to ``ic_df``.

    Returns
    -------
    pandas.DataFrame
        ``ic_df`` with ``tand0_{name}``, ``tand_{name}`` and ``Rmax_{name}``
        columns added in place.
    """
    # load Bmap interpolator once; shared by all parallel workers
    B_ = get_df_interp_func(filename=B_file, gauss=False)
    print("Start of emtracks Sim in Pion Degrader (tan(dip))")
    print("-----------------------------------------")
    # print(f"Directory: {outdir},\nFilename: {name},\nNumber: {N}")
    print(f"Using BField File: {B_file}")
    start = time.time()
    num_cpu = multiprocessing.cpu_count()
    print(f"CPU Cores: {num_cpu}")
    # one task per particle row; the previous enumerate() wrapper produced an
    # unused index, so iterate the rows directly (tqdm output unchanged)
    reco_tuples = Parallel(n_jobs=num_cpu)(
        delayed(run_trajectory)(row, B_)
        for row in tqdm(ic_df.itertuples(),
                        file=sys.stdout,
                        desc='particle #',
                        total=len(ic_df)))
    # unpack the (tand0, tand, Rmax) tuple returned per particle
    tand0s = np.array([t[0] for t in reco_tuples])
    tands = np.array([t[1] for t in reco_tuples])
    Rmaxs = np.array([t[2] for t in reco_tuples])
    # store results in IC dataframe
    ic_df.loc[:, f"tand0_{name}"] = tand0s
    ic_df.loc[:, f"tand_{name}"] = tands
    ic_df.loc[:, f"Rmax_{name}"] = Rmaxs
    stop = time.time()
    dt = stop - start
    N = len(ic_df)
    print("Calculations Complete")
    print(f"Runtime: {dt} s, {dt/60.} min, {dt/60./60.} hr")
    # NOTE(review): the 2*N divisor implies two trajectories per particle;
    # confirm run_trajectory really solves two, otherwise this should be dt/N.
    print(f"Speed: {dt / (2*N)} s / trajectory\n")

    return ic_df
from emtracks.particle import trajectory_solver
from emtracks.tools import InitConds, Bounds, ic_Mu2e, bounds_Mu2e
from emtracks.mapinterp import get_df_interp_func
from emtracks.Bdist import get_B_df_distorted

# data locations
ddir = "/home/ckampa/data/"
forig = ddir + "root/Mu2e_ConversionElectron_MC.pkl"
fBnom = ddir + "Bmaps/Mu2e_DSMap_V13.p"

# nominal map plus a "v0" distorted copy of it
df_Mu2e_nom = pd.read_pickle(fBnom)
df_Mu2e_dis = get_B_df_distorted(df_Mu2e_nom, v="0")
# df_Mu2e_dis = get_B_df_distorted(df_Mu2e_nom, v="0", Bz0=1000.) # 10x gradient

# B interpolation functions built from each map
B_Mu2e_nom = get_df_interp_func(df=df_Mu2e_nom,
                                gauss=False)  #, bounds=bounds_Mu2e)
B_Mu2e_dis = get_df_interp_func(df=df_Mu2e_dis,
                                gauss=False)  #, bounds=bounds_Mu2e)

# drop the large source frames; only the interpolators are kept alive
del df_Mu2e_nom
del df_Mu2e_dis


def load_origins(filename=forig, N=10):
    """Read the conversion-electron origins pickle and sample ``N`` rows.

    Parameters
    ----------
    filename : str
        Path to the pickled origins DataFrame (default: module ``forig``).
    N : int
        Number of rows to randomly sample.

    Returns
    -------
    pandas.DataFrame
        Sampled rows with a fresh 0..N-1 index (old index kept as a column).
    """
    sampled = pd.read_pickle(filename).sample(N)
    sampled.reset_index(inplace=True)
    return sampled


def create_init_conds(origin_tuple):
from joblib import Parallel, delayed
import multiprocessing

from emtracks.particle import trajectory_solver
from emtracks.mapinterp import get_df_interp_func

# generated (signal) momentum, MeV/c
p0 = 104.96

# directory holding the saved track pickles
datadir = '/home/shared_data/mao10,mao13_analysis/data/mao13no_nom/'

# nominal B-field interpolator built from the Mau13 DS map
ddir = '/home/shared_data/'
fBnom = ddir + "Bmaps/Mu2e_DSMap_V13.p"
B_Mu2e_nom = get_df_interp_func(fBnom, gauss=False)

# analyze a given track w nominal and distorted field
def analyze_particle_momentum(filename, datadir=datadir):
    # load track (pickle)
    # fname = datadir+f'{scale}_{theta}_{phi}_0.054_.pkl'
    e = trajectory_solver.from_pickle(datadir+filename)
    # scale from filename
    scale = float(filename[:4])
    # get full theta, phi from B
    theta0 = e.init_conds.theta0
    phi0 = e.init_conds.phi0
    # analyze in nominal only
    # nominal
    e.B_func = B_Mu2e_nom
    e.analyze_trajectory_LHelix(step=50, stride=1)
# ---- Example #5 (separate snippet below) ----
# 11 points from 0.995 to 1.005 (0.001 steps)
scales_finer = np.linspace(.995, 1.005, 11)
scales = np.concatenate([scales_coarse, scales_fine,
                         scales_finer])  # coarse fields + new fields
# label each scale: 2 decimals when its 3rd decimal digit is 0, else 3;
# the TS label follows the same rule ('1.00' vs '1.000')
scales_str = []
TSs = []
for scale in scales:
    on_coarse_grid = int(round(scale * 1000 % 10)) == 0
    scales_str.append(f'{scale:.2f}' if on_coarse_grid else f'{scale:.3f}')
    TSs.append('1.00' if on_coarse_grid else '1.000')

#### Solenoid off
# fields = [f'{n:0.2f}' for n in scales] # which fields to analyze
fields = scales_str
# nominal interpolator plus one distorted interpolator per (field, TS) pair
B_Mu2e_nom = get_df_interp_func(fBnom, gauss=False)
B_Mu2e_dis_list = [
    get_df_interp_func(fBdis.format(field, TS), gauss=False)
    for field, TS in zip(fields, TSs)
]

####

#### Linear Gradient
# df_Mu2e_nom = pd.read_pickle(fBnom)
# df_Mu2e_dis = get_B_df_distorted(df_Mu2e_nom, v="0")
# # B functions
# B_Mu2e_nom = get_df_interp_func(df=df_Mu2e_nom, gauss=False)#, bounds=bounds_Mu2e)
# B_Mu2e_dis = get_df_interp_func(df=df_Mu2e_dis, gauss=False)#, bounds=bounds_Mu2e)

# del(df_Mu2e_nom)
ddir = '/home/ckampa/data/'
# ddir = '/home/shared_data/'
# Test of Brian Fit
# fBnom = ddir+"Bmaps/Mu2e_DSMap_V13.p"
# fBdis = ddir+"Bmaps/Mau13_standard_tracker_fit_df.p"
# Test of Hall Probe biased up
fBnom = ddir + "Bmaps/Mau13_standard_tracker_fit_df.p"
# fBdis = ddir+"Bmaps/Mau13_middle_hp_bias_up_fit_df.p"
# one biased-up fit map per Hall probe index
nums = [0, 1, 2, 3, 4]
fBdis_list = []
for num in nums:
    fBdis_list.append(ddir + f"Bmaps/hp_bias/Mau13_hp_{num}_bias_up_fit_df.p")

# get interp funcs (interpolate the fitted-field columns)
B_Mu2e_nom = get_df_interp_func(fBnom,
                                gauss=False,
                                Blabels=['Bx_fit', 'By_fit', 'Bz_fit'])
# B_Mu2e_dis = get_df_interp_func(fBdis, gauss=False, Blabels=['Bx_fit', 'By_fit', 'Bz_fit'])
B_Mu2e_dis_list = [get_df_interp_func(fBdis,
                                      gauss=False,
                                      Blabels=['Bx_fit', 'By_fit', 'Bz_fit'])
                   for fBdis in fBdis_list]

step = 100  # 100 default


# analyze a given track w nominal and distorted field
def analyze_particle_momentum(particle_num, name, outdir):
    # load track (pickle)
# ---- Example #7 (separate snippet below) ----
# linear distortion ramp in z: initial_B at start_point down to final_B at
# end_point (start_point is defined earlier in the file, outside this view)
end_point = 14
initial_B = 50  # (roughly 1% distortion at z = 3.0, 0% at z = 14)
final_B = 0

# slope of the linear distortion vs z
m = (final_B - initial_B) / (end_point - start_point)
n = 50
# sample the z range with n points for plotting
step = (end_point - start_point) / n
t = np.arange(start_point, end_point, step)
# visualize the distortion profile over z
x = plt.plot(t, ((t - start_point) * m) + initial_B)
plt.title("Distortion")
plt.xlabel("Z (meters)")
plt.ylabel("B (gauss)")

#MU2E FIELD
# nominal DS field map: raw dataframe plus an interpolation function
df_Mu2e = pd.read_pickle(mapdir + "Mu2e_DSMap_V13.p")
B_Mu2e_func = get_df_interp_func(mapdir + "Mu2e_DSMap_V13.p", gauss=False)

#MU2E FIELD + DIS
# apply the linear-gradient ("v0") distortion to the nominal map
df_Mu2e_dis = get_B_df_distorted(df_Mu2e,
                                 v="0",
                                 Bz0=initial_B,
                                 Bzf=0,
                                 z0=start_point,
                                 zf=end_point)
B_Mu2e_dis = get_df_interp_func(df=df_Mu2e_dis, gauss=False)


#input N, return N random values between 0 and 2pi
def get_random_phi(N, rng=None):
    """Return ``N`` random azimuthal angles uniform in [0, 2*pi).

    Parameters
    ----------
    N : int
        Number of angles to draw.
    rng : numpy.random.Generator, optional
        Generator to draw from, for reproducible results. When None
        (default) the global numpy RNG is used, preserving the original
        behavior.

    Returns
    -------
    numpy.ndarray
        Array of ``N`` angles in radians.
    """
    if rng is None:
        return np.random.uniform(0, 2 * math.pi, N)
    return rng.uniform(0, 2 * math.pi, N)
# ---- Example #8 (separate snippet below) ----
from emtracks.mapinterp import get_df_interp_func
from emtracks.Bdist import get_B_df_distorted

# data locations
ddir = "/home/ckampa/data/"
forig = ddir + "root/Mu2e_ConversionElectron_MC.pkl"
# fBnom = ddir+"Bmaps/Mu2e_DSMap_V13.p"
# fBnom = ddir+"Bmaps/Mau13_middle_hp_bias_up_fit_df.p" # bias middle Hall probe # BAD
fBnom = ddir + "Bmaps/Mau13_standard_fit_df.p"  # standard (unbiased) fit

# df_Mu2e_nom = pd.read_pickle(fBnom)
# df_Mu2e_dis = get_B_df_distorted(df_Mu2e_nom, v="0")
# # df_Mu2e_dis = get_B_df_distorted(df_Mu2e_nom, v="0", Bz0=1000.) # 10x gradient

# nominal field interpolator over the fitted-field columns
B_Mu2e_nom = get_df_interp_func(filename=fBnom,
                                gauss=False,
                                Blabels=['Bx_fit', 'By_fit',
                                         'Bz_fit'])  #, bounds=bounds_Mu2e)
# B_Mu2e_nom = get_df_interp_func(df=df_Mu2e_nom, gauss=False)#, bounds=bounds_Mu2e)
# B_Mu2e_dis = get_df_interp_func(df=df_Mu2e_dis, gauss=False)#, bounds=bounds_Mu2e)

# del(df_Mu2e_nom)
# del(df_Mu2e_dis)


def load_origins(filename=forig, N=10):
    """Load the origins DataFrame and return a random sample of ``N`` rows.

    The sampled frame's index is reset in place, so the previous index is
    kept as an ``index`` column.
    """
    origins = pd.read_pickle(filename)
    origins = origins.sample(N)
    origins.reset_index(inplace=True)
    return origins

# ---- Example #9 (separate snippet below) ----
    args = parser.parse_args()
    # fill defaults where needed
    if args.map_name is None:
        args.map_name = mu2e_ext_path + 'Bmaps/Mu2e_DSMap_V13.p'
    else:
        args.map_name = mu2e_ext_path + 'Bmaps/' + args.map_name
    if args.pkl_dir is None:
        args.pkl_dir = mu2e_ext_path + 'pickles/Bfit_CE_reco/default/'
    else:
        args.pkl_dir = mu2e_ext_path + 'pickles/Bfit_CE_reco/' + args.pkl_dir + '/'
    # short Bfield name
    index = [i.start() for i in re.finditer('/', args.map_name)][-1]
    Bshort = args.map_name[index + 1:-9]
    outname = args.pkl_dir + Bshort + '_deltaP.p'
    # check pickle output directory and create if doesn't exist
    check_dir(args.pkl_dir)
    # get df func
    print(f'Using Bmap File: {args.map_name}')
    Bfunc = get_df_interp_func(filename=args.map_name,
                               gauss=False,
                               Blabels=['Bx_fit', 'By_fit', 'Bz_fit'])
    # run analysis
    # N_lim = 64 # testing
    N_lim = None
    run_analysis(
        Bfunc,
        outname,
        name="run_04",
        outdir="/home/ckampa/data/pickles/distortions/linear_gradient/run_04/",
        N_lim=N_lim)
# ---- Example #10 (separate snippet below) ----
# load and prep tracker fit dataframe
# df_fit = pd.read_pickle('/home/ckampa/data/Bmaps/Mau13_standard_tracker_fit_df.p')
# df_fit = pd.read_pickle('/home/ckampa/data/Bmaps/Mau13_middle_hp_bias_up_fit_df.p')
# nums = [0, 1, 2, 3, 4]
# which Hall-probe bias map to load
num = 4
df_fit = pd.read_pickle(
    f'/home/ckampa/data/Bmaps/hp_bias/Mau13_hp_{num}_bias_up_fit_df.p')
# field magnitudes for data and fit
df_fit.eval('B = (Bx**2+By**2+Bz**2)**(1/2)', inplace=True)
df_fit.eval('B_fit = (Bx_fit**2+By_fit**2+Bz_fit**2)**(1/2)', inplace=True)
# residuals (data - fit) for |B| and each component
for _expr in ('B_res = B - B_fit',
              'Bx_res = Bx - Bx_fit',
              'By_res = By - By_fit',
              'Bz_res = Bz - Bz_fit'):
    df_fit.eval(_expr, inplace=True)
# setup interp functions over the residual columns
xyz_res_func = get_df_interp_func(df=df_fit,
                                  Blabels=['Bx_res', 'By_res', 'Bz_res'])
# NOTE(review): label list mixes B_res with By_res/Bz_res — looks like a
# copy-paste of the xyz labels; confirm this is intentional.
mag_res_func = get_df_interp_func(df=df_fit,
                                  Blabels=['B_res', 'By_res', 'Bz_res'])

# electron-track sampling stride: 1 = real (3 mm); 10 was used for testing (3 cm)
ESTRIDE = 1


def track_residual(
        particle_num,
        Rrange=None,
        name='run_04',