library = openmc.data.DataLibrary()

for name, paths in sorted(tables.items()):
    # Convert first temperature for the table
    p = paths[0]
    print(f'Converting: {p}')
    if p.name.endswith('t'):
        data = openmc.data.ThermalScattering.from_ace(p)
    else:
        # 'mcnp' selects the MCNP scheme for interpreting metastable ZAIDs
        data = openmc.data.IncidentNeutron.from_ace(p, 'mcnp')

    # For each higher temperature, add cross sections to the existing table
    for p in paths[1:]:
        print(f'Adding: {p}')
        if p.name.endswith('t'):
            data.add_temperature_from_ace(p)
        else:
            data.add_temperature_from_ace(p, 'mcnp')

    # Export HDF5 file
    h5_file = args.destination / f'{data.name}.h5'
    print(f'Writing {h5_file}...')
    data.export_to_hdf5(h5_file, 'w', libver=args.libver)

    # Register with library
    library.register_file(h5_file)

# Handle photoatomic data
if args.photon is not None:
    lib = openmc.data.ace.Library(args.photon)
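    # openmc.data.ace.Library parses a concatenated ACE library file and makes
    # the individual data tables available through its .tables attribute, which
    # the grouping loop below iterates over.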
    # Group together tables for the same nuclide
    tables = defaultdict(list)
    for table in lib.tables:
        zaid, xs = table.name.split('.')
        tables[zaid].append(table)

    for zaid, zaid_tables in sorted(tables.items()):
        # Convert first temperature for the table
        print(f'Converting: {zaid_tables[0].name}')
        data = openmc.data.IncidentNeutron.from_ace(zaid_tables[0], 'mcnp')

        # For each higher temperature, add cross sections to the existing table
        for table in zaid_tables[1:]:
            print(f'Adding: {table.name}')
            data.add_temperature_from_ace(table, 'mcnp')

        # Export HDF5 file
        h5_file = args.destination / f'{data.name}.h5'
        print(f'Writing {h5_file}...')
        data.export_to_hdf5(h5_file, 'w', libver=args.libver)

        # Register with library
        library.register_file(h5_file)

# Handle S(a,b) tables
endf70sab = args.mcnpdata / 'endf70sab'
if endf70sab.exists():
    lib = openmc.data.ace.Library(endf70sab)

    # Group together tables for the same nuclide
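    # The S(a,b) conversion follows the same pattern as the neutron tables
    # above. A minimal sketch (illustrative, not the script's verbatim code),
    # assuming thermal table names of the form 'lwtr.10t':
    #
    #     sab_tables = defaultdict(list)
    #     for table in lib.tables:
    #         name, xs = table.name.split('.')
    #         sab_tables[name].append(table)
    #
    #     for name, group in sorted(sab_tables.items()):
    #         data = openmc.data.ThermalScattering.from_ace(group[0])
    #         for table in group[1:]:
    #             data.add_temperature_from_ace(table)
    #         h5_file = args.destination / f'{data.name}.h5'
    #         data.export_to_hdf5(h5_file, 'w', libver=args.libver)
    #         library.register_file(h5_file)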
# Sort each group's files from lowest to highest temperature
for name, filenames in tables.items():
    filenames.sort(key=lambda x: int(x.parts[-2].split('_')[1][:-1]))

# Create output directory if it doesn't exist
args.destination.mkdir(parents=True, exist_ok=True)

library = openmc.data.DataLibrary()

for name, filenames in sorted(tables.items()):
    # Convert first temperature for the table
    print(f'Converting: {filenames[0]}')
    data = openmc.data.IncidentNeutron.from_ace(filenames[0])

    # For each higher temperature, add cross sections to the existing table
    for filename in filenames[1:]:
        print(f'Adding: {filename}')
        data.add_temperature_from_ace(filename)

    # Export HDF5 file
    h5_file = args.destination / f'{data.name}.h5'
    print(f'Writing {h5_file}...')
    data.export_to_hdf5(h5_file, 'w', libver=args.libver)

    # Register with library
    library.register_file(h5_file)

# ==============================================================================
# GENERATE HDF5 LIBRARY -- S(A,B) FILES

# Group together tables for same nuclide
tables = defaultdict(list)
for filename in sorted(release_details[args.release]['sab_files']):
for p in sorted((ace_files_dir / 'ace_293').glob('*.ace'), key=key):
    print(f'Converting: {p}')
    temp, z, a, m = key(p)
    data = openmc.data.IncidentNeutron.from_ace(p)
    if m == 'm' and not data.name.endswith('_m1'):
        # Correct metastable flag and name
        data.metastable = 1
        data.name += '_m1'

    # For each higher temperature, add cross sections to the existing table
    for T in ('600', '900', '1200', '1500', '1800'):
        p_add = ace_files_dir / f'ace_{T}' / (p.stem.replace('293', T) + '.ace')
        print(f'Adding temperature: {p_add}')
        data.add_temperature_from_ace(p_add)

    # Export HDF5 file and register it with the library
    h5_file = args.destination / f'{data.name}.h5'
    data.export_to_hdf5(h5_file, 'w', libver=args.libver)
    lib.register_file(h5_file)

# ==============================================================================
# CONVERT THERMAL SCATTERING FILES

thermal_mats = [
    'al-sap', 'be', 'ca-cah2', 'd-d2o', 'graph', 'h-cah2',