Code example #1
def _sampling_mp(ismp, skip_title=False, skip_fend=False):
    global init, pnu, pxs, plpc, pchi, pfy, tape
    t0 = time.time()
    mat = tape.mat[0]
    newtape = Endf6(tape.copy())
    extra_points = np.logspace(-5, 7, init.energy_sequence)
    if not pxs.empty:
        xs = newtape.get_xs()
        if not xs.empty:
            xspert = xs.perturb(pxs[ismp])
            newtape = newtape.update_xs(xspert)
    if not pnu.empty:
        nubar = newtape.get_nubar()
        if not nubar.empty:
            nubarpert = nubar.perturb(pnu[ismp])
            newtape = newtape.update_nubar(nubarpert)
    if not pchi.empty:
        edistr = pfns.from_endf6(newtape).add_points(extra_points)
        if not edistr.empty:
            edistrpert = edistr.perturb(pchi[ismp])
            newtape = newtape.update_edistr(edistrpert)
    if not plpc.empty:
        lpc = newtape.get_lpc().add_points(extra_points)
        if not lpc.empty:
            lpcpert = lpc.perturb(plpc[ismp])
            newtape = newtape.update_lpc(lpcpert)
    if not pfy.empty:
        fy = newtape.get_fy()
        if not fy.empty:
            fypert = fy.perturb(pfy[ismp])
            newtape = newtape.update_fy(fypert)
    print("Created sample {} for MAT {} in {:.2f} sec".format(ismp, mat, time.time()-t0,))
    descr = ["perturbed file No.{} created by SANDY".format(ismp)]
    return newtape.delete_cov().update_info(descr=descr).write_string(skip_title=skip_title, skip_fend=skip_fend)
Code example #2
File: endf6_test.py Project: luca-fiorito-11/sandy
def test_update_info(testPu9):
    testPu9.loc[9437, 3, 1].TEXT = "\n".join(
        testPu9.loc[9437, 3, 1].TEXT.splitlines()[:10]) + "\n"
    testPu9 = Endf6(testPu9.drop([(9437, 3, 102)]))
    new = testPu9.update_info()
    recordsold = testPu9.read_section(9437, 1, 451)["RECORDS"]
    recordsnew = new.read_section(9437, 1, 451)["RECORDS"]
    assert (3, 102, 147, 1) in recordsold
    assert (3, 102, 147, 1) not in recordsnew
    assert (3, 1, 188, 1) in recordsold
    assert (3, 1, 10, 1) in recordsnew
Code example #3
def test_sample_fy():
    tape = Endf6.from_text("\n".join(FY.endf6))
    nsmp = 1000
    ismp = randint(1, nsmp)
    fyu5 = tape.get_fy(listenergy=[4e5]).filter_by("MAT", 9228)
    fyall = tape.get_fy()
    cov = fyu5.get_cov(mat=9228, mt=454, energy=4e5)
    perts = cov.get_samples(nsmp)
    pert = perts[ismp]
    fynew = fyall.perturb(pert)
    assert (fyall.query("MAT!=9228 & MT!=454 & E!=4e5") == fynew.query(
        "MAT!=9228 & MT!=454 & E!=4e5")).all().all()
    fy = fynew.query("MAT==9228 & MT==454 & E==4e5")
    assert not (fyall.query("MAT==9228 & MT==454 & E==4e5") == fy).all().all()
    assert (fy.YI >= 0).all()
    assert (fy.YI <= fy.YI * 2).all()
Code example #4
def test_sample_xs():
    errtape = Errorr.from_text("\n".join(H1.errorr))
    nsmp = 1000
    perts = XsCov.from_errorr(
        errtape.filter_by(listmt=[102, 451])).get_samples(nsmp, eig=10)
    pendftape = Endf6.from_text("\n".join(H1.pendf))
    xs = pendftape.get_xs()
    ismp = randint(1, nsmp)
    pert = perts[ismp]
    pxs = xs.perturb(pert)
    newtape = pendftape.update_xs(pxs)
    assert perts.shape[1] == nsmp
    mat = 125
    mt = 102
    ratio = pxs / xs.values
    pert = pert.loc[mat, mt]
    ugrid = pert.index
    pert = pert.reindex(pert.index.union(
        ratio.index)).ffill().fillna(1).reindex(ratio.index)
    assert np.allclose(ratio[mat, mt], pert)
    assert newtape.loc[125, 3, 102].TEXT != pendftape.loc[125, 3, 102].TEXT
Code example #5
def sampling(iargs=None):
    """Construct multivariate normal distributions with a unit vector for 
    mean and with relative covariances taken from the evaluated files.
    Perturbation factors are sampled with the same multigroup structure of 
    the covariance matrix, and are applied to the pointwise data to produce 
    the perturbed files.
    """
    global init, pnu, pxs, plpc, pchi, pfy, tape
    init = parse(iargs)
    ftape = read_formatted_file(init.file)
    covtape = read_formatted_file(init.cov) if init.cov else ftape
    # nsub = ftape.get_nsub()
    # INITIALIZE PERT DF
    pnu = pd.DataFrame()
    pxs = pd.DataFrame()
    plpc = pd.DataFrame()
    pchi = pd.DataFrame()
    pfy = pd.DataFrame()
    ftape, covtape, pnu, pxs, plpc, pchi, pfy = extract_samples(ftape, covtape)
    df = {}
    if pnu.empty and pxs.empty and plpc.empty and pchi.empty and pfy.empty:
        logging.warn("no covariance section was selected/found")
        return ftape, covtape, df
    # APPLY PERTURBATIONS BY MAT
    for imat,(mat, tape) in enumerate(sorted(ftape.groupby('MAT'))):
        skip_title = False if imat == 0 else True
        skip_fend = False if imat == len(ftape.mat) - 1 else True
        tape = Endf6(tape)
        kw = dict(skip_title=skip_title, skip_fend=skip_fend)
        if init.processes == 1:
            outs = {i : _sampling_mp(i, **kw) for i in range(1,init.samples+1)}
        else:
            pool = mp.Pool(processes=init.processes)
            outs = {i : pool.apply_async(_sampling_mp, (i,), kw) for i in range(1,init.samples+1)}
            outs = {i : out.get() for i,out in outs.items()}
            pool.close()
            pool.join()
        df.update({mat : outs})
    # DUMP TO FILES
    frame = pd.DataFrame(df)
    frame.index.name = "SMP"
    frame.columns.name = "MAT"
    frame = frame.stack()
    outname = init.outname if init.outname else os.path.split(init.file)[1]
    for ismp,dfsmp in frame.groupby("SMP"):
        output = os.path.join(init.outdir, '{}-{}'.format(outname, ismp))
        with open(output, 'w') as f:
            for mat,dfmat in dfsmp.groupby("MAT"):
                f.write(frame[ismp,mat])
    # PRODUCE ACE FILES
    if init.acer:
        if init.processes == 1:
            for i in range(1,init.samples+1):
                _process_into_ace(i)
        else:
            pool = mp.Pool(processes=init.processes)
            outs = {i : pool.apply_async(_process_into_ace, (i,)) for i in range(1,init.samples+1)}
            pool.close()
            pool.join()
    return ftape, covtape, df


    # NOTE: everything from here to the end of the function is unreachable
    # leftover code, since it follows the return statement above.
    df = {}
    if init.fission_yields:
        # EXTRACT FY PERTURBATIONS FROM COV FILE
        fy = ftape.get_fy(listmat=init.mat, listmt=init.mt)
        if fy.empty:
            logging.warn("no fission yield section was selected/found")
            return
        index = fy.index.to_frame(index=False)
        dfperts = []
        for mat,dfmat in index.groupby("MAT"):
            for mt,dfmt in dfmat.groupby("MT"):
                for e,dfe in dfmt.groupby("E"):
                    fycov = fy.get_cov(mat, mt, e)
                    pert = fycov.get_samples(init.samples, eig=0)
                    dfperts.append(pert)
        PertFy = FySamples(pd.concat(dfperts))
        if init.debug: PertFy.to_csv("perts_mf8.csv")
        # DELETE LOCAL VARIABLES
        # NOTE: this has no effect: deleting keys from the dict returned by
        # locals() does not remove the local variables of the function.
        for k in list(locals().keys()):
            del locals()[k]
        # APPLY PERTURBATIONS BY MAT
        for imat,(mat, tape) in enumerate(sorted(ftape.groupby('MAT'))):
            skip_title = False if imat == 0 else True
            skip_fend = False if imat == ftape.index.get_level_values("MAT").unique().size -1 else True
            tape = Endf6(tape)
            kw = dict(skip_title=skip_title, skip_fend=skip_fend)
            if mat not in init.mat:
                out = tape.write_string(**kw)
                outs = {i : out for i in range(1,init.samples+1)}
            else:
                if init.processes == 1:
                    outs = {i : _sampling_fy_mp(i, **kw) for i in range(1,init.samples+1)}
                else:
                    pool = mp.Pool(processes=init.processes)
                    outs = {i : pool.apply_async(_sampling_fy_mp, (i,), kw) for i in range(1,init.samples+1)}
                    outs = {i : out.get() for i,out in outs.items()}
                    pool.close()
                    pool.join()
            df.update({ mat : outs })
    else:
        # EXTRACT NUBAR PERTURBATIONS FROM ENDF6 FILE
        PertNubar = pd.DataFrame()
        if 31 in init.mf and 31 in ftape.mf:
            nubarcov = XsCov.from_endf6(covtape.filter_by(listmat=init.mat, listmf=[31], listmt=init.mt))
            if not nubarcov.empty:
                PertNubar = nubarcov.get_samples(init.samples, eig=init.eig)
                if init.debug:
                    PertNubar.to_csv("perts_mf31.csv")
        # EXTRACT PERTURBATIONS FROM EDISTR COV FILE
        PertEdistr = pd.DataFrame()
        if 35 in init.mf and 35 in ftape.mf:
            edistrcov = ftape.get_edistr_cov()
            if not edistrcov.empty:
                PertEdistr = edistrcov.get_samples(init.samples, eig=init.eig)
                if init.debug:
                    PertEdistr.to_csv("perts_mf35.csv")
        # EXTRACT PERTURBATIONS FROM LPC COV FILE
        PertLpc = pd.DataFrame()
        if 34 in init.mf and 34 in covtape.mf:
            lpccov = ftape.get_lpc_cov()
            if not lpccov.empty:
                if init.max_polynomial:
                    lpccov = lpccov.filter_p(init.max_polynomial)
                PertLpc = lpccov.get_samples(init.samples, eig=init.eig)
                if init.debug:
                    PertLpc.to_csv("perts_mf34.csv")
        # EXTRACT XS PERTURBATIONS FROM COV FILE
        PertXs = pd.DataFrame()
        if 33 in init.mf and 33 in covtape.mf:
            if init.errorr and len(ftape.mat) > 1: # Limit imposed by running ERRORR to get covariance matrices
                raise SandyError("More than one MAT number was found")
            if ftape.get_file_format() == "endf6":
                with tempfile.TemporaryDirectory() as td:
                    outputs = njoy.process(init.file, broadr=False, thermr=False, 
                                           unresr=False, heatr=False, gaspr=False, 
                                           purr=False, errorr=init.errorr, acer=False,
                                           wdir=td, keep_pendf=True,
                                           temperatures=[0], suffixes=[0], err=0.005)[2]
                    ptape = read_formatted_file(outputs["tape30"])
                    if init.errorr:
                        covtape = read_formatted_file(outputs["tape33"]) # WARNING: by doing this we delete the original covtape
                ftape = ftape.delete_sections((None, 3, None)). \
                              add_sections(ptape.filter_by(listmf=[3])). \
                              add_sections(ptape.filter_by(listmf=[1], listmt=[451]))
            listmterr = init.mt if init.mt is None else [451] + init.mt  # ERRORR needs MF1/MT451 to get the energy grid
            covtape = covtape.filter_by(listmat=init.mat, listmf=[1,33], listmt=listmterr)
            covtype = covtape.get_file_format()
            xscov = XsCov.from_errorr(covtape) if covtype == "errorr" else XsCov.from_endf6(covtape)
            if not xscov.empty:
                PertXs = xscov.get_samples(init.samples, eig=init.eig, seed=init.seed33)
                if init.debug:
                    PertXs.to_csv(os.path.join(init.outdir, "perts_mf33.csv"))
        if PertLpc.empty and PertEdistr.empty and PertXs.empty and PertNubar.empty:
            sys.exit("no covariance section was selected/found")
            return
        # DELETE LOCAL VARIABLES
        # NOTE: this has no effect: deleting keys from the dict returned by
        # locals() does not remove the local variables of the function.
        for k in list(locals().keys()):
            del locals()[k]
        # APPLY PERTURBATIONS BY MAT
        for imat,(mat, tape) in enumerate(sorted(ftape.groupby('MAT'))):
            skip_title = False if imat == 0 else True
            skip_fend = False if imat == ftape.index.get_level_values("MAT").unique().size -1 else True
            tape = Endf6(tape)
            kw = dict(skip_title=skip_title, skip_fend=skip_fend)
            if init.processes == 1:
                outs = {i : _sampling_mp(i, **kw) for i in range(1,init.samples+1)}
            else:
                pool = mp.Pool(processes=init.processes)
                outs = {i : pool.apply_async(_sampling_mp, (i,), kw) for i in range(1,init.samples+1)}
                outs = {i : out.get() for i,out in outs.items()}
                pool.close()
                pool.join()
            df.update({ mat : outs })
    # DUMP TO FILES
    frame = pd.DataFrame(df)
    frame.index.name = "SMP"
    frame.columns.name = "MAT"
    frame = frame.stack()
    outname = init.outname if init.outname else os.path.split(init.file)[1]
    for ismp,dfsmp in frame.groupby("SMP"):
        output = os.path.join(init.outdir, '{}-{}'.format(outname, ismp))
        with open(output, 'w') as f:
            for mat,dfmat in dfsmp.groupby("MAT"):
                f.write(frame[ismp,mat])
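
The docstring of `sampling` above summarizes the method: perturbation factors are drawn from a multivariate normal distribution with a unit mean vector and the relative covariance matrix taken from the evaluated file, on the multigroup structure of that covariance, and are then applied multiplicatively to the pointwise data. Below is a minimal stand-alone sketch of that idea using only numpy and pandas; the group structure, covariance values and flat cross section are invented for illustration and this is not SANDY's API.

import numpy as np
import pandas as pd

# Relative covariance matrix on a coarse 3-group structure (made-up numbers).
egroups = np.array([1e-5, 1e3, 1e6, 2e7])   # group boundaries in eV
rcov = np.array([[4e-4, 1e-4, 0.0],
                 [1e-4, 9e-4, 2e-4],
                 [0.0,  2e-4, 1e-3]])

# Perturbation factors: unit mean, covariance equal to the relative covariance.
nsmp = 1000
smp = np.random.multivariate_normal(np.ones(3), rcov, size=nsmp)

# Pointwise cross section on a fine energy grid (flat, for simplicity).
egrid = np.logspace(-5, np.log10(2e7), 50)
xs = pd.Series(1.0, index=egrid)

# Apply the first sample: each energy point takes the factor of the group it
# falls in, using the same reindex/ffill pattern checked in test_sample_xs above.
pert = pd.Series(smp[0], index=egroups[:-1])
pert = pert.reindex(pert.index.union(xs.index)).ffill().fillna(1).reindex(xs.index)
xs_pert = xs * pert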
Code example #6
def process_proton(endftape, wdir="", dryrun=False, tag="", exe=None, route="0", **kwargs):
    """Run sequence to process proton file with njoy.
    
    Parameters
    ----------
    endftape : `str`
        ENDF-6 file to process
    wdir : `str`
        working directory (absolute or relative) where all output files are
        saved
        .. note:: `wdir` will appear as part of the `filename` in any `xsdir` file
    dryrun : `bool`
        option to produce the njoy input file without running njoy
    tag : `str`
        tag to append to each output filename before the extension (default is `""`)
        .. hint:: to process JEFF-3.3 files you could set `tag = "_j33"`
    exe : `str`
        njoy executable (with path)
        .. note:: If no executable is given, SANDY looks for a default executable in `PATH`
    route : `str`
        xsdir "route" parameter (default is "0")
    
    Returns
    -------
    input : `str`
        njoy input text
    inputs : `map`
        map of {`tape` : `file`} for input files
    outputs : `map`
        map of {`tape` : `file`} for output files
    """
    tape = Endf6.from_file(endftape)
    mat = tape.mat[0]
    info = tape.read_section(mat, 1, 451)
    meta = info["LISO"]
    za = int(info["ZA"])
    za_new = za + meta*100 + 300 if meta else za
    inputs = {}
    outputs = {}
    kwargs["mat"] = mat
    inputs["tape20"] = endftape
    kwargs["temp"] = 0
    kwargs["suff"] = suff = ".00"
    text = _acer_input(20, 20, 50, 70, **kwargs)
    outputs["tape50"] = os.path.join(wdir, "{}{}{}h".format(za_new, tag, suff))
    outputs["tape70"] = os.path.join(wdir, "{}{}{}h.xsd".format(za_new, tag, suff))
    text += "stop"
    if not dryrun:
        _run_njoy(text, inputs, outputs, exe=exe)
        # Change route and filename in xsdir file.
        acefile = outputs["tape50"]
        xsdfile = outputs["tape70"]
        text_xsd = open(xsdfile).read(). \
                                 replace("route", route). \
                                 replace("filename", acefile)
        text_xsd = " ".join(text_xsd.split())
        # If isotope is metastable rewrite ZA in xsdir and ace as ZA = Z*1000 + 300 + A + META*100.
        if meta:
            pattern = '{:d}'.format(za) + r'\.(?P<ext>\d{2}[ct])'
            found = re.search(pattern, text_xsd)
            ext = found.group("ext")
            text_xsd = text_xsd.replace("{:d}.{}".format(za, ext), "{:d}.{}".format(za_new, ext), 1)
            text_ace = open(acefile).read()
            text_ace = text_ace.replace("{:d}.{}".format(za, ext), "{:d}.{}".format(za_new, ext), 1)
            with open(acefile, 'w') as f:
                f.write(text_ace)
        with open(xsdfile, 'w') as f:
            f.write(text_xsd)
    return text, inputs, outputs
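
A hypothetical call of `process_proton` could look as follows; the file name, output directory and tag are placeholders, and `dryrun=True` only generates the njoy input deck without running njoy.

# Hypothetical usage sketch; the proton ENDF-6 file name, working directory
# and tag below are placeholders, not values prescribed by SANDY.
text, inputs, outputs = process_proton(
    "p-001_H_001.endf",   # proton ENDF-6 file (placeholder name)
    wdir="ace",           # where the ACE and xsdir files are written
    tag="_j33",           # appended to the output file names (see the hint above)
    dryrun=True,          # only produce the njoy input text, do not run njoy
)
print(text)               # inspect the generated njoy input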
Code example #7
def process(endftape,
            pendftape=None,
            kermas=[302, 303, 304, 318, 402, 442, 443, 444, 445, 446, 447],
            temperatures=[293.6],
            suffixes=None,
            broadr=True,
            thermr=True,
            unresr=False,
            heatr=True,
            gaspr=True,
            purr=True,
            errorr=False,
            acer=True,
            wdir="",
            dryrun=False,
            tag="",
            method=None,
            exe=None,
            keep_pendf=True,
            route="0",
            addpath=None,
            **kwargs):
    """
    Run sequence to process file with njoy.
    
    Parameters
    ----------
    endftape : `str`
        ENDF-6 file to process
    pendftape : `str`, optional, default is `None`
        skip module reconr and use this PENDF file
    kermas : iterable of `int`, optional, default is `[302, 303, 304, 318, 402, 442, 443, 444, 445, 446, 447]`
        MT numbers for partial kermas to pass to heatr.
        .. note:: `MT=301` is the KERMA total (energy balance) and is always calculated
    temperatures : iterable of `float`, optional, default is [293.6]
        iterable of temperature values in K
    suffixes : iterable of `int`, optional, default is `None`
        iterable of suffix values for ACE files: if `None` is given, use internal routine to determine suffixes
        .. warning:: `suffixes` must match the number of entries in `temperatures`
    broadr : `bool`, optional, default is `True`
        option to run module broadr
    thermr : `bool`, optional, default is `True`
        option to run module thermr
    unresr : `bool`, optional, default is `False`
        option to run module unresr
    heatr : `bool`, optional, default is `True`
        option to run module heatr 
    gaspr : `bool`, optional, default is `True`
        option to run module gaspr
    purr : `bool`, optional, default is `True`
        option to run module purr
    errorr : `bool`, optional, default is `False`
        option to run module errorr
    acer : `bool`, optional, default is `True`
        option to run module acer
    wdir : `str`, optional, default is `""`
        working directory (absolute or relative) where all output files are saved
        .. note:: `wdir` will appear as part of the `filename` in any `xsdir` file if `addpath` is not set
    addpath : `str`, optional, default is `None`
        path to add in xsdir, by default use `wdir`
    dryrun : `bool`, optional, default is `False`
        option to produce the njoy input file without running njoy
    tag : `str`, optional, default is `""`
        tag to append to each output filename before the extension
        .. hint:: to process JEFF-3.3 files you could set `tag = "_j33"`
    exe : `str`, optional, default is `None`
        njoy executable (with path)
        .. note:: if no executable is given, SANDY looks for a default executable in `PATH` and in env variable `NJOY`
    keep_pendf : `bool`, optional, default is `True`
        save output PENDF file
    route : `str`, optional, default is `0`
        xsdir "route" parameter
    
    Returns
    -------
    input : `str`
        njoy input text
    inputs : `map`
        map of {`tape` : `file`} for input files
    outputs : `map`
        map of {`tape` : `file`} for output files
    """
    tape = Endf6.from_file(endftape)
    mat = tape.mat[0]
    info = tape.read_section(mat, 1, 451)
    meta = info["LISO"]
    za = int(info["ZA"])
    zam = za*10 + meta
    za_new = za + meta*100 + 300 if meta else za
    outprefix = zam if method == "aleph" else za_new
    inputs = {}
    outputs = {}
    # Only kwargs are passed to NJOY inputs, therefore add temperatures and mat
    kwargs.update({"temperatures" : temperatures, "mat" : mat})
    # Check input args
    if not suffixes:
        suffixes = [get_suffix(temp, meta, method) for temp in temperatures]
    if len(suffixes) != len(temperatures):
        raise SandyError("number of suffixes must match number of temperatures")
    inputs["tape20"] = endftape
    e = 21
    p = e + 1
    text = _moder_input(20, -e)
    if pendftape:
        inputs["tape99"] = pendftape
        text += _moder_input(99, -p)
    else:
        text += _reconr_input(-e, -p, **kwargs)
    if broadr:
        o = p + 1
        text += _broadr_input(-e, -p, -o, **kwargs)
        p = o
    if thermr:
        o = p + 1 
        text += _thermr_input(0, -p, -o, **kwargs)
        p = o
    if unresr:
        o = p + 1
        text += _unresr_input(-e, -p, -o, **kwargs)
        p = o
    if heatr:
        for i in range(0, len(kermas), 7):
            o = p + 1
            kwargs["pks"] = kermas[i:i+7]
            text += _heatr_input(-e, -p, -o, **kwargs)
            p = o
    if gaspr:
        o = p + 1
        text += _gaspr_input(-e, -p, -o, **kwargs)
        p = o
    if purr:
        o = p + 1
        text += _purr_input(-e, -p, -o, **kwargs)
        p = o
    if keep_pendf:
        o = 30
        text += _moder_input(-p, o)
        outputs["tape{}".format(o)] = os.path.join(wdir, "{}{}.pendf".format(outprefix, tag))
    if errorr:
        for i,(temp,suff) in enumerate(zip(temperatures, suffixes)):
            o = 33 + i
            kwargs["temp"] = temp
            kwargs["suff"] = suff = ".{}".format(suff)
            text += _errorr_input(-e, -p, o, **kwargs)
            outputs["tape{}".format(o)] = os.path.join(wdir, "{}{}{}.errorr".format(outprefix, tag, suff))
    if acer:
        for i,(temp,suff) in enumerate(zip(temperatures, suffixes)):
            a = 50 + i
            x = 70 + i
            kwargs["temp"] = temp
            kwargs["suff"] = suff = ".{}".format(suff)
            text += _acer_input(-e, -p, a, x, **kwargs)
            outputs["tape{}".format(a)] = os.path.join(wdir, "{}{}{}c".format(outprefix, tag, suff))
            outputs["tape{}".format(x)] = os.path.join(wdir, "{}{}{}c.xsd".format(outprefix, tag, suff))
    text += "stop"
    if not dryrun:
        _run_njoy(text, inputs, outputs, exe=exe)
        if acer:
            # Change route and filename in xsdir file.
            for i,(temp,suff) in enumerate(zip(temperatures, suffixes)):
                a = 50 + i
                x = 70 + i
                acefile = outputs["tape{}".format(a)]
                if addpath is None:
                    filename = acefile
                else:
                    filename = os.path.basename(acefile)
                    if addpath:
                        filename = os.path.join(addpath, filename)
                xsdfile = outputs["tape{}".format(x)]
                text_xsd = open(xsdfile).read(). \
                                         replace("route", route). \
                                         replace("filename", filename)
                text_xsd = " ".join(text_xsd.split())
                # If isotope is metastable rewrite ZA in xsdir and ace as ZA = Z*1000 + 300 + A + META*100.
                if meta and method != "aleph":
                    pattern = '{:d}'.format(za) + r'\.(?P<ext>\d{2}[ct])'
                    found = re.search(pattern, text_xsd)
                    ext = found.group("ext")
                    text_xsd = text_xsd.replace("{:d}.{}".format(za, ext), "{:d}.{}".format(za_new, ext), 1)
                    text_ace = open(acefile).read()
                    text_ace = text_ace.replace("{:d}.{}".format(za, ext), "{:d}.{}".format(za_new, ext), 1)
                    with open(acefile, 'w') as f:
                        f.write(text_ace)
                with open(xsdfile, 'w') as f:
                    f.write(text_xsd)
    return text, inputs, outputs
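
A hypothetical call of `process` could look as follows; every file name, temperature, suffix and directory below is a placeholder chosen for illustration, and `dryrun=True` keeps njoy from actually running.

# Hypothetical usage sketch; file name, temperatures, suffixes and paths are
# placeholders, not values prescribed by SANDY.
text, inputs, outputs = process(
    "n-092_U_238.endf",            # incident-neutron ENDF-6 file (placeholder)
    temperatures=[293.6, 600.0],   # one ACE file per temperature
    suffixes=["03", "06"],         # must match the number of temperatures
    errorr=True,                   # also produce multigroup covariance tapes
    wdir="out",                    # PENDF, ERRORR and ACE outputs go here
    dryrun=True,                   # only produce the njoy input text
)
for tape, filename in sorted(outputs.items()):
    print(tape, "->", filename)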
Code example #8
File: endf6_test.py Project: luca-fiorito-11/sandy
def testU8():
    tape = Endf6.from_text("\n".join(U8.endf6))
    assert (tape.index.get_level_values("MAT").unique() == 9237).all()
    return tape
Code example #9
File: endf6_test.py Project: luca-fiorito-11/sandy
def testU5():
    tape = Endf6.from_text("\n".join(U5.nfy))
    assert (tape.index.get_level_values("MAT").unique() == 9228).all()
    return tape
Code example #10
File: endf6_test.py Project: luca-fiorito-11/sandy
def test_write_to_string(testH1):
    string = testH1.write_string()
    newtape = Endf6.from_text(string)
    assert testH1.equals(newtape)
Code example #11
File: endf6_test.py Project: luca-fiorito-11/sandy
def testFe56():
    tape = Endf6.from_text("\n".join(Fe56.endf6))
    assert (tape.index.get_level_values("MAT").unique() == 2631).all()
    return tape
Code example #12
File: endf6_test.py Project: luca-fiorito-11/sandy
def testH1():
    tape = Endf6.from_text("\n".join(H1.pendf))
    assert (tape.index.get_level_values("MAT").unique() == 125).all()
    return tape