def add_receiver_theory_points(ds, output_file=None, theory_ds='POLARBEAR/pb2a_cryostat_japan_measurements.txt'):
    """Align measured receiver targets to the cryostat theory dataset and add
    the transformed theoretical FOCALPLANE points to the dataset.

    Parameters
    ----------
    ds : str or mf.Dataset
        Measurement dataset, or a path to load one from.  A caller-supplied
        Dataset is copied, so the input object is never mutated.
    output_file : str, optional
        NOTE(review): currently unused in this function — confirm whether the
        updated dataset was meant to be written here.
    theory_ds : str or mf.Dataset
        Theory (as-designed) dataset, or a path to load one from.

    Returns
    -------
    mf.Dataset
        The working dataset with the transformed focal-plane theory points
        added (also printed, as in the original implementation).

    Raises
    ------
    ValueError
        If either argument is neither a path string nor an ``mf.Dataset``.
    """
    # isinstance() is the idiomatic type check (was: ``type(x) is T``).
    if isinstance(ds, str):
        ds = mf.Dataset(from_file=ds)
    elif isinstance(ds, mf.Dataset):
        ds = ds.copy()
    else:
        raise ValueError('Type of dataset argument not recognized.')

    if isinstance(theory_ds, str):
        theoryds = mf.Dataset(from_file=theory_ds)
    elif isinstance(theory_ds, mf.Dataset):
        theoryds = theory_ds.copy()
    else:
        raise ValueError('Type of theory dataset argument not recognized.')

    # Fit the transform taking the measured receiver targets onto the theory
    # dataset; scale is held fixed (fitmap={'s': False}).
    dsuse = ds.subset_from_labels(MEASURED_RECEIVER_TARGET_NAMES)
    with mf.AlignDatasets(ds1=dsuse, ds2=theoryds, fitmap={'s': False}) as tamodel:
        # NOTE(review): the collapsed original makes the extent of the
        # ``with`` block ambiguous; only the sampling step clearly requires
        # the model context.  ``error_scale1``/``error_scale2`` are passed
        # through pm.sample exactly as in the original — confirm the sampler
        # wrapper accepts them.
        tatrace = pm.sample(2000,
                            tune=5500,
                            init='advi+adapt_diag',
                            nuts_kwargs={'target_accept': .90,
                                         'max_treedepth': 25},
                            error_scale1=1.,
                            error_scale2=1.)
    pm.save_trace(tatrace)
    pm.traceplot(tatrace)
    plt.show()

    # Push the theoretical focal-plane points through the fitted transform
    # and summarize the posterior: mean position, std as both error fields.
    fptheory = theoryds.subset_from_marker('FOCALPLANE')
    pos, err = tamodel.use_transform_trace(fptheory.to_tensors(), tatrace)
    newtheoryarray = mf.DatasetArrays(pos=np.mean(pos, axis=0),
                                      err=np.std(pos, axis=0),
                                      serr=np.std(pos, axis=0))
    newfptheory = fptheory.remake_from_arrays(newtheoryarray)
    for p in newfptheory.values():
        ds.add_point(p)
    print(ds)
    return ds
def load_model_primary():
    """Build the alignment model between the 2018-12-05 and 2018-12-08
    primary/receiver survey datasets.

    All seven transform parameters (translation, rotation, scale) are left
    free and measurement errors are rescaled; the fit uses points carrying
    the PRIMARY marker.

    Returns
    -------
    The ``mf.AlignDatasets`` model, returned from inside its context.
    """
    survey_a = mf.Dataset(from_file='20181205_primary_receiver.txt',
                          name='ds20181205')
    survey_b = mf.Dataset(from_file='20181208_primary_receiver.txt',
                          name='ds20181208')
    # Every fit parameter enabled, including error rescaling.
    free_params = ('tx', 'ty', 'tz', 's', 'rx', 'ry', 'rz', 'rescale_errors')
    with mf.AlignDatasets(ds1=survey_a,
                          ds2=survey_b,
                          use_marker='PRIMARY',
                          fitmap={name: True for name in free_params}) as model:
        return model
def load_model_moons():
    """Build the alignment model between the first and third MOONS survey
    datasets of 2016-08-02, fitting the full seven-parameter transform
    (translation, rotation, scale) on points carrying the TARGET marker.

    Returns
    -------
    The ``mf.AlignDatasets`` model, returned from inside its context.

    NOTE(review): the original also loaded
    './MOONS/VSTARS/moons_20160802_2_aligned.txt' into an unused local
    (``ds2``) while aligning dataset 1 against dataset 3.  The dead load has
    been removed here; confirm that pairing 1 with 3 (rather than 1 with 2)
    is the intended comparison.
    """
    ds1 = mf.Dataset(from_file='./MOONS/VSTARS/moons_20160802_1_aligned.txt',
                     name='DS1')
    ds3 = mf.Dataset(from_file='./MOONS/VSTARS/moons_20160802_3_aligned.txt',
                     name='DS3')
    with mf.AlignDatasets(ds1=ds1,
                          ds2=ds3,
                          use_marker='TARGET',
                          fitmap={'tx': True, 'ty': True, 'tz': True,
                                  's': True,
                                  'rx': True, 'ry': True, 'rz': True}) as model:
        return model
# Rename targets in a dataset by aligning it against one or more template
# datasets and matching nearest points.  NOTE(review): this fragment appears
# truncated — the inner loop ends at the argsort and ``rename_vals`` is never
# used; the remainder presumably follows outside this view.
rename_ds = mf.Dataset(from_file=args.dataset)
rename_vals = []
if has_duplicate_label(rename_ds):
    # Duplicate labels are tolerated here but will block saving later.
    print(
        f'Dataset {rename_ds.label} has duplicate labels. If this is true after renaming it will not be saved.'
    )
for template in args.template:
    this_temp = mf.Dataset(from_file=template)
    if has_duplicate_label(this_temp):
        # A template with duplicate labels cannot provide unambiguous names.
        print(f'Template {this_temp.name} has duplicate labels. Skipping.')
        continue
    # MAP-fit the transform taking the template onto the dataset, using
    # CODE-marked points for the alignment.
    with mf.AlignDatasets(ds1=this_temp, ds2=rename_ds, use_marker='CODE') as model:
        maptrans = pm.find_MAP(model=model)
    # Collect the seven transform parameters from the MAP result by
    # substring match on the (possibly prefixed/suffixed) variable names.
    vals = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz', 's']
    tdict = {}
    for val in vals:
        for key in maptrans:
            # NOTE(review): substring match — 's' also matches any key
            # containing 's' (e.g. transformed names); confirm key naming
            # makes this unambiguous.
            if val in key:
                tdict[val] = maptrans[key]
    # Apply the fitted transform to the dataset's point tensors and pull the
    # concrete positions back out of the symbolic graph.
    trans = mf.TheanoTransform(trans=tdict)
    renamet = rename_ds.to_tensors()
    renametprime = trans * renamet
    rename_pos = renametprime.pos.eval()
    for point in this_temp.values():
        # Distances from every transformed dataset point to this template
        # point; indices sorted nearest-first.
        this_vec_dists = rename_pos.T - point.pos
        this_dists = np.linalg.norm(this_vec_dists, axis=1)
        minidx = np.argsort(this_dists)