Example #1
def test_all_particles_are_loaded():
    # Assumed imports for this snippet: from particle import Particle, data
    Particle.load_table(data.open_text(data, "particle2018.csv"))
    assert len(Particle.all()) == 605
    Particle.load_table(data.open_text(data, "particle2019.csv"))
    assert len(Particle.all()) == 610
    Particle.load_table(data.open_text(data, "particle2020.csv"))
    assert len(Particle.all()) == 610

    Particle.load_table(data.open_text(data, "nuclei2020.csv"))
    assert len(Particle.all()) == 5880

    # Load default table to restore global state
    Particle.load_table()
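
A small follow-on sketch, not taken from the original test: once a table is loaded, individual entries can be looked up through the standard particle API (Particle.from_pdgid is assumed here).

# Hypothetical usage after Particle.load_table() has run:
pion = Particle.from_pdgid(211)   # look up the charged pion by its PDG ID
assert pion.pdgid == 211
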
Example #2
def test_generate(tmp_path):
    'Verify that freshly generated CSV files match the data files shipped with the package.'
    # `produce_files` and `data` are assumed to come from the package's conversion
    # utilities and data subpackage (imports are not shown in this snippet).

    particle2008 = tmp_path / 'particle2008.csv'
    particle2018 = tmp_path / 'particle2018.csv'

    produce_files(particle2008, particle2018, '2018')

    particle2008_data = data.open_text(data, 'particle2008.csv')
    with particle2008.open() as src, particle2008_data as res:
        assert src.read() == res.read()

    particle2018_data = data.open_text(data, 'particle2018.csv')
    with particle2018.open() as src, particle2018_data as res:
        assert src.read() == res.read()
Example #3
def test_generate(tmp_path):
    "This verifies that the input and output files match."

    particle2008 = tmp_path / "particle2008.csv"
    particle2019 = tmp_path / "particle2019.csv"

    produce_files(particle2008, particle2019, "2019")

    particle2008_data = data.open_text(data, "particle2008.csv")
    with particle2008.open() as src, particle2008_data as res:
        assert src.read() == res.read()

    particle2019_data = data.open_text(data, "particle2019.csv")
    with particle2019.open() as src, particle2019_data as res:
        assert src.read() == res.read()
Example #4
def test_DirectionalMaps():
    # Assumed imports for this snippet: pytest, plus DirectionalMaps, PDGID,
    # PythiaID and MatchingIDNotFound from the particle package.
    filename = data.open_text(data, "pdgid_to_pythiaid.csv")
    PDG2PyIDMap, Py2PDGIDMap = DirectionalMaps(
        "PDGID", "PythiaID", filename=filename, converters=(int, int)
    )

    assert len(PDG2PyIDMap) == 538
    assert len(Py2PDGIDMap) == 538

    assert "DirectionalMap(PDGID->PYTHIAID)" in str(PDG2PyIDMap)
    assert "DirectionalMap(PYTHIAID->PDGID)" in str(Py2PDGIDMap)

    # IDs with no counterpart in the other numbering scheme raise MatchingIDNotFound.
    with pytest.raises(MatchingIDNotFound):
        PDG2PyIDMap[PDGID(9000221)]
    with pytest.raises(MatchingIDNotFound):
        Py2PDGIDMap[PythiaID(9000221)]
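
For context, a hedged sketch not present in the original test: when the queried particle exists in pdgid_to_pythiaid.csv, the lookup returns the counterpart ID. The charged pion, which carries the number 211 in both schemes, is assumed here to be in the table.

# Hypothetical successful lookups in both directions:
assert PDG2PyIDMap[PDGID(211)] == 211
assert Py2PDGIDMap[PythiaID(211)] == 211
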
Example #5
def test_generate(tmp_path):
    "This verifies that the input and output files match."

    particle2018 = tmp_path / "particle2018.csv"
    particle2019 = tmp_path / "particle2019.csv"

    produce_files(particle2018, particle2019, "DUMMY", "2019")
    """
    # No longer test this file, which eventually will be removed
    particle2018_data = data.open_text(data, "particle2018.csv")
    with particle2018.open() as src, particle2018_data as res:
        src = [l for l in src.readlines() if not l.startswith("#")]
        res = [l for l in res.readlines() if not l.startswith("#")]
        assert src == res
    """

    particle2019_data = data.open_text(data, "particle2019.csv")
    with particle2019.open() as src, particle2019_data as res:
        src = [line for line in src.readlines() if not line.startswith("#")]
        res = [line for line in res.readlines() if not line.startswith("#")]
        assert src == res
Example #6
def read_pdg(year, constraints):
    """Read particle masses and widths from the PDG data file of a given year."""
    # Assumed context: FlavioParticle wraps particle.Particle, p_data is the particle
    # data subpackage, and Parameter, NormalDistribution and AsymmetricNormalDistribution
    # come from flavio (imports are not shown in this snippet).
    FlavioParticle.load_table(p_data.open_text(p_data, "particle{}.csv".format(year)))
    for particle in FlavioParticle.flavio_all():
        # each entry is either None or a (name, tex, description, central, right, left) tuple
        for prop in (particle.flavio_m, particle.flavio_tau):
            if prop is None:
                continue
            name, tex, description, central, right, left = prop
            try:
                # if parameter already exists, remove existing constraints on it
                p = Parameter[name]
                constraints.remove_constraint(name)
            except KeyError:
                # otherwise, create it
                p = Parameter(name)
            p.tex = tex
            p.description = description
            if right == left:
                # symmetric uncertainty: a plain Gaussian constraint
                constraints.add_constraint(
                    [name], NormalDistribution(central, right))
            else:
                # asymmetric uncertainty: separate right/left deviations
                constraints.add_constraint(
                    [name], AsymmetricNormalDistribution(
                        central, right_deviation=right, left_deviation=left))
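
A hypothetical usage sketch, with names assumed from flavio's public API rather than taken from the original source: read_pdg fills a ParameterConstraints container with the mass and lifetime values of the chosen PDG year.

from flavio.classes import ParameterConstraints

constraints = ParameterConstraints()
read_pdg(2019, constraints)  # assumes particle2019.csv ships with the particle package
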
Example #7
def test_explicit_table_loading():
    Particle.load_table(data.open_text(data, "particle2019.csv"))
    assert Particle.table_loaded()
    assert len(Particle.table_names()) == 1
    assert Particle.all() is not None
Example #8
def test_file_has_latex(filename):
    # `filename` is assumed to be injected by pytest parametrization; pandas is
    # assumed to be imported as pd (imports are not shown in this snippet).
    particle_data = data.open_text(data, filename)
    p = pd.read_csv(particle_data)

    assert p[p.Latex == ''].empty
Example #9
def test_file_dup(filename):
    particle_data = data.open_text(data, filename)
    p = pd.read_csv(particle_data)

    duplicates = {item for item, count in Counter(p.ID).items() if count > 1}
    assert duplicates == set()


def test_file_has_latex(filename):
    with data.open_text(data, filename) as particle_data:
        p = pd.read_csv(particle_data, comment="#")

    assert p[p.Latex == ""].empty


def test_file_dup(filename):
    with data.open_text(data, filename) as particle_data:
        p = pd.read_csv(particle_data, comment="#")

    duplicates = {item for item, count in Counter(p.ID).items() if count > 1}
    assert duplicates == set()
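
The bare filename argument in the last few tests is presumably supplied by pytest parametrization. A minimal sketch of how that could look; the decorator and the file list here are assumptions, not taken from the original source:

import pytest

@pytest.mark.parametrize("filename", ["particle2018.csv", "particle2019.csv"])
def test_file_dup(filename):
    ...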