Esempio n. 1
0
import pandas as pd

from hepqpr.qallse.dsmaker import create_dataset

if __name__ == '__main__':

    #---- Script Constants
    # Three benchmark runs, all at 75% density, one trial each.
    test_densities = [0.75] * 3
    trials_per_density = 1
    input_seed = None  # None -> let create_dataset choose its own random seed

    #---- Test Doublet Making
    # One result row per (density, trial): [density, *stats from create_dataset].
    # NOTE: the previous version pre-built one row per density and advanced a
    # manual index once per trial, which over-ran the list for
    # trials_per_density > 1; appending per trial is correct for any count.
    results = []
    for ds in test_densities:
        for _ in range(trials_per_density):
            print(f'--> Testing doublet_making with {ds} density')
            # In test_mode, create_dataset is expected to return the stats
            # tuple matched by `titles` below — confirm against dsmaker.
            stat_row = create_dataset(density=ds,
                                      min_hits_per_track=5,
                                      high_pt_cut=1.0,
                                      random_seed=input_seed,
                                      double_hits_ok=False,
                                      gen_doublets=True,
                                      test_mode=True)
            results.append([ds, *stat_row])

    #---- Write Tests to Disk
    titles = 'density,runtime,recall,precision,doublets_made,seed'.split(',')
    stats = pd.DataFrame(results, columns=titles)  # pandas import was missing before
    print('--> Stats')
    print(stats)
    stats.to_csv('75_parallel_results.csv', index=False)
Esempio n. 2
0
if __name__ == '__main__':
    # Build one statistics row per dataset described in `ds_info`.
    # NOTE(review): fragment — `ds_info`, `time_this`, `op` (presumably
    # os.path), `trackml_train_path`, `output_path` and `create_dataset` are
    # defined elsewhere in the original file, and the `mat.append([` opened at
    # the bottom is truncated here.
    mat = []
    # Each line of ds_info appears to be CSV "event,density,seed" — confirm
    # against where ds_info is defined.
    for row in ds_info.strip().split('\n'):
        e, d, s = row.split(',')
        event, ds, seed = int(e), float(d), int(s)
        # Output prefix encodes density as a percentage, e.g. 0.1 -> "ds10".
        prefix = f'ds{ds*100:.0f}'

        print(f'\n>>>> {prefix} <<<<\n')
        # time_this presumably yields a mutable timing record populated on
        # exit (time_info[0] is read below) — verify its contract.
        with time_this() as time_info:
            metas, path = create_dataset(density=ds,
                                         input_path=op.join(
                                             trackml_train_path,
                                             f'event00000{event}-hits.csv'),
                                         output_path=output_path,
                                         prefix=prefix,
                                         min_hits_per_track=5,
                                         high_pt_cut=1.0,
                                         random_seed=int(seed),
                                         double_hits_ok=False,
                                         gen_doublets=True)

        # Accumulate one summary row; statement continues past this fragment.
        mat.append([
            event,
            int(ds * 100),
            metas['num_hits'],
            metas['num_noise'],
            metas['num_tracks'],
            metas['num_important_tracks'],
            seed,
            time_info[0],
Esempio n. 3
0
        # NOTE(review): fragment — this code runs inside an enclosing loop not
        # visible here; `prefix`, `ds_options`, `pickle`, `time_this`,
        # `create_dataset`, `results`, `k`, `nPhi` and the nPhi*SeedingConfig
        # objects are all defined elsewhere in the original file.
        prefix += f'_noPhiCut'
        ds_options["prefix"] = prefix

        # generate the dataset
        import os
        path = os.path.join(ds_options['output_path'], prefix,
                            "event000001000")
        if os.path.exists(path + "-hits.csv"):
            # Dataset already on disk: reuse the cached metadata and the
            # pickled timing info instead of regenerating.
            import json
            with open(path + "-meta.json") as f:
                meta = json.load(f)
            with open(path + "-metaHits.pickle", 'rb') as f:
                time_info = pickle.load(f)
        else:
            # Generate the dataset and persist the timing info for future runs.
            with time_this() as time_info:
                meta, path = create_dataset(**ds_options)
            with open(os.path.join(path + "-metaHits.pickle"), 'wb') as f:
                pickle.dump(time_info, f)

        # time_info[1] presumably holds the hit-reading wall time — confirm
        # against the time_this implementation.
        results[k]['TReadingHits'] = time_info[1]
        results[k]['meta'] = meta

        from hepqpr.qallse.seeding import generate_doublets, SeedingConfig
        # NOTE(review): no else/default — if nPhi matches none of these values,
        # seedingConfig is undefined (or stale from a previous loop iteration).
        # A dict lookup with an explicit error would be safer.
        if nPhi == 53: seedingConfig = nPhi53SeedingConfig
        if nPhi == 100: seedingConfig = nPhi100SeedingConfig
        if nPhi == 75: seedingConfig = nPhi75SeedingConfig
        if nPhi == 25: seedingConfig = nPhi25SeedingConfig
        if nPhi == 10: seedingConfig = nPhi10SeedingConfig
        if nPhi == 6: seedingConfig = nPhi6SeedingConfig
        if nPhi == 1: seedingConfig = nPhi1SeedingConfig
        # generate the doublets: the important part is the config_cls !
Esempio n. 4
0
# NOTE(review): fragment — `QallseD0`, `dnx` (dwave_networkx), `neal`,
# `TabuSampler`, `dimod`, `tempfile`, `pd`, `DataWrapper`, `dsmaker_config`
# and `add_missing` are imported/defined elsewhere in the original file.
model_class = QallseD0  # model class to use
extra_config = dict()  # configuration arguments overriding the defaults

#: FIXME experimental pegasus setup
# NOTE(review): a negative size for pegasus_graph looks wrong — dwave_networkx
# documents a positive integer order m; confirm the intended value.
P6 = dnx.pegasus_graph(-100, nice_coordinates=True)
classical_sampler = neal.SimulatedAnnealingSampler()
tabu_sampler = TabuSampler()
# Structure-restrict the chosen software sampler to the Pegasus topology.
#sampler = dimod.StructureComposite(classical_sampler, P6.nodes, P6.edges)
sampler = dimod.StructureComposite(tabu_sampler, P6.nodes, P6.edges)

# Generated dataset lives in a temporary directory for this run.
tempdir = tempfile.TemporaryDirectory()
print(f'using {tempdir.name}')

metas, path = create_dataset(output_path=tempdir.name,
                             random_seed=240834351,
                             gen_doublets=True,
                             **dsmaker_config)

#path =  '/tmp/hpt-collapse/ds10/event000001000'

# Echo the generated dataset's metadata.
with open(path + '-meta.json') as f:
    print(f.read())

# load data
dw = DataWrapper.from_path(path)
doublets = pd.read_csv(path + '-doublets.csv')
if add_missing:
    doublets = dw.add_missing_doublets(doublets)
else:
    # NOTE(review): precision/recall (p, r) and missing doublets (ms) are
    # computed but never printed — only the count is reported; possibly the
    # print was meant to include them.
    p, r, ms = dw.compute_score(doublets)
    print(f'got {len(doublets)}.')