Example #1
0
def write_partisn_input_3D():
    """Test creation of a 3-D PARTISN input file.

    Generates a PARTISN input from the test DAGMC geometry on a 3-D
    structured mesh and compares it against a stored expected file.

    Returns
    -------
    bool
        True when the generated input matches the expected file.
    """
    # Skip when PyMOAB is unavailable, matching the sibling tests.
    if not HAVE_PYMOAB:
        raise SkipTest

    # Path to hdf5 test file
    THIS_DIR = os.path.dirname(os.path.realpath(__file__))
    hdf5 = os.path.join(THIS_DIR, 'files_test_partisn',
                        'partisn_test_geom.h5m')
    data_hdf5path = '/materials'
    nuc_hdf5path = '/nucid'

    # Create a fully 3-D structured mesh (more than one interval per axis)
    xvals = [-5., 0., 10., 15.]
    yvals = [-5., 0., 5.]
    zvals = [-5., 0., 5.]
    mesh = Mesh(structured_coords=[xvals, yvals, zvals], structured=True,
                structured_ordering='xyz')

    # Path for output file
    input_file = os.path.join(THIS_DIR, 'files_test_partisn',
                              'partisn_3D.inp')

    # other inputs
    ngroup = 5
    pn = 2

    # expected output file
    file_expected = os.path.join(THIS_DIR, 'files_test_partisn',
                                 'partisn_3D_expected.inp')

    partisn.write_partisn_input(mesh, hdf5, ngroup, pn,
                                data_hdf5path=data_hdf5path,
                                nuc_hdf5path=nuc_hdf5path,
                                input_file=input_file,
                                num_rays=100, grid=True)

    out = filecmp.cmp(input_file, file_expected)
    # Clean up the generated file so reruns start fresh (as siblings do).
    os.remove(input_file)
    return out
Example #2
0
def write_partisn_input_options():
    """Test PARTISN input file creation with a slew of keyword arguments.

    Exercises ``dg``/``mat_assigns`` discretization input, per-block
    ``cards`` overrides, ``fine_per_coarse``, and verifies that passing
    ``num_rays`` together with ``dg`` emits exactly one warning.

    Returns
    -------
    bool
        True when exactly one warning was raised and the generated input
        matches the expected file.
    """
    # NOTE: the docstring above was previously a bare string expression
    # placed after this guard, where it had no effect as documentation.
    if not HAVE_PYMOAB:
        raise SkipTest

    THIS_DIR = os.path.dirname(os.path.realpath(__file__))
    hdf5 = os.path.join(THIS_DIR, 'files_test_partisn',
                        'partisn_test_geom.h5m')
    input_file = os.path.join(
        THIS_DIR, 'files_test_partisn', 'partisn_options.inp')
    file_expected = os.path.join(
        THIS_DIR, 'files_test_partisn', 'partisn_options_expected.inp')

    sc = [-5., 0., 10., 15.], [-5., 5.], [-5., 5.]
    mesh = Mesh(structured_coords=sc, structured=True)
    ngroup = 66

    # Precomputed discretization results: (mesh idx, cell, vol frac, error)
    dg = np.zeros(4, dtype=[('idx', np.int64),
                            ('cell', np.int64),
                            ('vol_frac', np.float64),
                            ('rel_error', np.float64)])
    dg[:] = [(0, 1, 1.0, 0.0), (1, 1, 0.5, 0.04714045207910317),
             (1, 2, 0.5, 0.04714045207910317), (2, 2, 1.0, 0.0)]
    mat_assigns = {1: 'mat:Helium, Natural', 2: 'mat:Mercury',
                   5: 'mat:Graveyard', 6: u'mat:Vacuum'}

    # Per-block card overrides; unknown keys ("hello") pass through.
    cards = {"block1": {"isn": 6,
                        "maxscm": 3000000,
                        "maxlcm": 6000000,
                        },
             "block2": {"hello": "from block2"},
             "block3": {"lib": "xsf21-71",
                        "lng": 175,
                        "maxord": 5,
                        "ihm": 227,
                        "iht": 10,
                        "ihs": 11,
                        "ifido": 1,
                        "ititl": 1,
                        "i2lp1": 0,
                        "savbxs": 1,
                        "kwikrd": 1
                        },
             "block4": {"hello": "from block4"},
             "block5": {"source": "<this is a dummy source>"}
             }

    with warnings.catch_warnings(record=True) as w:
        partisn.write_partisn_input(mesh, hdf5, ngroup, input_file=input_file,
                                    dg=dg, mat_assigns=mat_assigns,
                                    fine_per_coarse=3,
                                    cards=cards, num_rays=9)
        # num_rays is ignored when dg is supplied; expect a warning.

    # verify we get a warning from including num_rays and dg
    out1 = len(w) == 1
    out2 = filecmp.cmp(input_file, file_expected)
    os.remove(input_file)
    return out1 and out2
Example #3
0
def write_partisn_input_with_names_dict():
    """Test PARTISN input creation with a custom nucid-to-name mapping.

    Supplies ``names_dict`` so nuclides are written with library-specific
    names (e.g. "he3") rather than default ones, and compares the result
    against an expected file.

    Returns
    -------
    bool
        True when the generated input matches the expected file.
    """
    try:
        from pyne import dagmc
    except ImportError:
        # Narrowed from a bare except: only a missing dagmc should skip.
        raise SkipTest

    if not HAVE_PYMOAB:
        raise SkipTest

    # Path to hdf5 test file
    THIS_DIR = os.path.dirname(os.path.realpath(__file__))
    hdf5 = THIS_DIR + "/files_test_partisn/partisn_test_geom.h5m"
    data_hdf5path = "/materials"

    # Create mesh
    xvals = [-5.0, 0.0, 10.0, 15.0]
    yvals = [-5.0, 5.0]
    zvals = [-5.0, 5.0]
    mesh = Mesh(
        structured_coords=[xvals, yvals, zvals],
        structured=True,
        structured_ordering="xyz",
    )

    # nuc_names list: PyNE nucids mapped to cross-section library names
    names = {}
    names[800000000] = "hg"
    names[20030000] = "he3"
    names[20040000] = "he4"

    # Path for output file
    input_file = THIS_DIR + "/files_test_partisn/partisn_nucnames.inp"

    # other inputs
    ngroup = 5

    # expected output file
    file_expected = THIS_DIR + "/files_test_partisn/partisn_nucnames_expected.inp"

    partisn.write_partisn_input(
        mesh,
        hdf5,
        ngroup,
        data_hdf5path=data_hdf5path,
        input_file=input_file,
        num_rays=100,
        grid=True,
        names_dict=names,
    )

    out = filecmp.cmp(input_file, file_expected)
    os.remove(input_file)
    return out
Example #4
0
def write_partisn_input_1D():
    """Test creation of a 1-D PARTISN input file.

    Uses a mesh with a single interval in y and z so only x varies,
    and compares the generated input against an expected file.

    Returns
    -------
    bool
        True when the generated input matches the expected file.
    """
    try:
        from pyne import dagmc
    except ImportError:
        # Narrowed from a bare except: only a missing dagmc should skip.
        raise SkipTest

    if not HAVE_PYMOAB:
        raise SkipTest

    # Path to hdf5 test file
    THIS_DIR = os.path.dirname(os.path.realpath(__file__))
    hdf5 = THIS_DIR + '/files_test_partisn/partisn_test_geom.h5m'
    data_hdf5path = '/materials'
    nuc_hdf5path = '/nucid'

    # Create mesh: one coarse interval in y and z makes the problem 1-D
    xvals = [-5., 0., 10., 15.]
    yvals = [-5., 5.]
    zvals = [-5., 5.]
    mesh = Mesh(structured_coords=[xvals, yvals, zvals],
                structured=True,
                structured_ordering='xyz')

    # Path for output file
    input_file = THIS_DIR + '/files_test_partisn/partisn_1D.inp'

    # other inputs
    ngroup = 5

    # expected output file
    file_expected = THIS_DIR + '/files_test_partisn/partisn_1D_expected.inp'

    partisn.write_partisn_input(mesh,
                                hdf5,
                                ngroup,
                                data_hdf5path=data_hdf5path,
                                nuc_hdf5path=nuc_hdf5path,
                                input_file=input_file,
                                num_rays=100,
                                grid=True)

    out = filecmp.cmp(input_file, file_expected)
    os.remove(input_file)
    assert out  # idiomatic: truthiness check instead of `== True`
    return out
Example #5
0
def step1(cfg):
    """This function writes the PARTISN input file for the adjoint photon
    transport.

    Parameters
    ----------
    cfg : dictionary
        User input for step 1 from the config.yml file

    Raises
    ------
    ValueError
        If cfg["origin"] does not contain exactly three space-separated
        entries.
    """
    # Get user-input from config file
    geom = cfg["geom_file"]
    cells = [cfg["src_cell"]]
    src_vol = [float(cfg["src_vol"])]

    try:
        origin_x, origin_y, origin_z = cfg["origin"].split(" ")
    except ValueError:
        # Previously this only printed a message and fell through to a
        # NameError on origin_x; fail loudly with a clear message instead.
        raise ValueError(
            "Origin location must contain exactly three entries: 'x y z'")

    xmesh = cfg["xmesh"]
    xints = cfg["xints"]
    ymesh = cfg["ymesh"]
    yints = cfg["yints"]
    zmesh = cfg["zmesh"]
    zints = cfg["zints"]

    # Create structured mesh. np.linspace requires an integer sample
    # count (a float `num` is a TypeError in modern NumPy).
    sc = [
        np.linspace(float(origin_x), float(xmesh), int(xints) + 1),
        np.linspace(float(origin_y), float(ymesh), int(yints) + 1),
        np.linspace(float(origin_z), float(zmesh), int(zints) + 1),
    ]
    m = Mesh(structured=True, structured_coords=sc)
    m.write_hdf5("blank_mesh.h5m")

    # Generate 42 photon energy bins [eV]
    #  First bin has been replaced with 1 for log interpolation
    photon_bins = np.array(
        [
            1e-6,
            0.01,
            0.02,
            0.03,
            0.045,
            0.06,
            0.07,
            0.075,
            0.1,
            0.15,
            0.2,
            0.3,
            0.4,
            0.45,
            0.51,
            0.512,
            0.6,
            0.7,
            0.8,
            1,
            1.33,
            1.34,
            1.5,
            1.66,
            2,
            2.5,
            3,
            3.5,
            4,
            4.5,
            5,
            5.5,
            6,
            6.5,
            7,
            7.5,
            8,
            10,
            12,
            14,
            20,
            30,
            50,
        ]
    )
    # ICRP 74 flux-to-dose conversion factors in pico-Sv/s per photon flux
    de = np.array(
        [
            0.01,
            0.015,
            0.02,
            0.03,
            0.04,
            0.05,
            0.06,
            0.07,
            0.08,
            0.1,
            0.15,
            0.2,
            0.3,
            0.4,
            0.5,
            0.6,
            0.8,
            1,
            2,
            4,
            6,
            8,
            10,
        ]
    )
    df = np.array(
        [
            0.0485,
            0.1254,
            0.205,
            0.2999,
            0.3381,
            0.3572,
            0.378,
            0.4066,
            0.4399,
            0.5172,
            0.7523,
            1.0041,
            1.5083,
            1.9958,
            2.4657,
            2.9082,
            3.7269,
            4.4834,
            7.4896,
            12.0153,
            15.9873,
            19.9191,
            23.76,
        ]
    )
    # Convert to Sv/s per photon FLUX
    pico = 1.0e-12
    df = df * pico
    # Convert pointwise data to group data for log interpolation
    photon_spectrum = pointwise_collapse(photon_bins, de, df, logx=True, logy=True)
    #  Anything below 0.01 MeV should be assigned the DF value of 0.01 MeV
    photon_spectrum[0] = df[0]
    # Total number of groups is 217 (42 photon + 175 neutron)
    spectra = [np.append(photon_spectrum, np.zeros(175))]
    # The spectrum is normalized by PyNE, so we need to mutliply by the sum of
    # intensities in the spectrum.
    # Additionally, we divide by the volume of the source cell in order to get
    # source density.
    intensities = [np.sum(spectra) / src_vol]

    # Load geometry into DAGMC
    load(geom)
    # Generate isotropic photon volume source
    source, dg = isotropic_vol_source(geom, m, cells, spectra, intensities)

    # PARTISN input
    ngroup = 217  # total number of energy groups
    cards = _cards(source)  # block 1, 3, 5 input values
    names_dict = _names_dict()  # dictionary of isotopes (PyNE nucids to bxslib names)

    write_partisn_input(
        m,
        geom,
        ngroup,
        cards=cards,
        dg=dg,
        names_dict=names_dict,
        data_hdf5path="/materials",
        nuc_hdf5path="/nucid",
        fine_per_coarse=1,
    )