def check(source):
    """Verify that an MCNP model contains no duplicate card names.

    Loads the model from *source*, walks every universe it contains and
    checks universes, cells, surfaces, transformations and compositions
    for name clashes.

    Args:
        source: path to the MCNP model file.

    Returns:
        0 (reserved as an exit/status code).
    """
    result = 0
    logger.info("Check model {}", source)
    parse_result: ParseResult = from_file(Path(source))
    model = parse_result.universe
    logger.debug("Read the model okay")
    universes = model.get_universes()
    check_duplicates(universes, "universe", Universe.name)
    cells, surfaces, transformations, compositions = [], [], [], []
    for universe in universes:
        cells.extend(universe)
        surfaces.extend(universe.get_surfaces(inner=False))
        found = collect_transformations(universe)
        if found:  # collect_transformations may return nothing
            transformations.extend(found)
        compositions.extend(universe.get_compositions())
    check_duplicates(cells, "cell", Card.name)
    check_duplicates(surfaces, "surface", Card.name)
    # An empty collection is passed as None, as in the original contract.
    check_duplicates(transformations or None, "transformation", Card.name)
    check_duplicates(compositions or None, "composition", Card.name)
    return result
def transform(
    output: Path,
    transformation: str,
    transformations: Path,
    source: Path,
    override: bool,
) -> None:
    """Apply a numbered transformation from a file to a model and save it.

    Args:
        output: destination path for the transformed model.
        transformation: number of the transformation to apply.
        transformations: file with MCNP transformation cards.
        source: path of the model to transform.
        override: allow overwriting an existing output file.

    Raises:
        FileExistsError: *output* exists and *override* is not set.
        ValueError: the requested transformation number is absent.
    """
    logger.info("Transforming model from {s}", s=source)
    if output.exists() and not override:
        raise FileExistsError(
            f"File {output} already exists. Remove it or use --override option"
        )
    parse_result: ParseResult = from_file(source)
    src: Universe = parse_result.universe
    trans = int(transformation)
    logger.debug("Loading transformations from {}", transformations)
    cards = clean_mcnp_cards(split_to_cards(transformations.read_text()))
    parsed = [parse_transformation(card.text) for card in cards]
    index = IndexOfNamed.from_iterable(
        parsed,
        on_duplicate=raise_on_duplicate_strategy,
    )
    if trans not in index:
        raise ValueError(f"Transformation {trans} is not found in {transformations}")
    dst = src.transform(index[trans])
    save_mcnp(dst, output, override)
def load_universes(
    fill_descriptor, universes_dir
) -> Dict[int, Tuple[mk.Universe, Union[int, List[float]]]]:
    """Load the filler universes referenced by a fill descriptor.

    Each universe file is parsed only once; repeated references to the same
    filler number must point to the same file.

    Args:
        fill_descriptor: parsed fill-descriptor mapping (cell -> spec dict).
        universes_dir: directory used to resolve relative universe paths.

    Returns:
        Mapping of cell name to a (universe, transformation) pair; the
        transformation is whatever the descriptor's "transform" entry holds
        (or None).

    Raises:
        ValueError: one filler number refers to two different files.
        FileNotFoundError: a referenced universe file cannot be found.
    """
    filler_path_map: Dict[int, Tuple[Path, mk.Universe]] = dict()
    cell_filler_map: Dict[int, Tuple[mk.Universe, Union[int, List[float]]]] = dict()
    for k, v in fill_descriptor.items():
        if not (isinstance(v, dict) and "universe" in v):
            continue  # skip non-cell entries (title, comment, etc.)
        cell_name = int(k)
        universe_name = int(v["universe"])
        transformation = v.get("transform", None)
        universe_path = Path(v["file"])
        if universe_name in filler_path_map:
            prev_path, prev_universe = filler_path_map[universe_name]
            if prev_path != universe_path:
                # BUG FIX: the old message embedded line-continuation
                # backslashes inside the f-string, producing garbled text
                # with literal "\" and space runs.
                raise ValueError(
                    f"Filler number {universe_name} specifies different paths to load: "
                    f'"{prev_path}" and "{universe_path}"'
                )
            universe = prev_universe
        else:
            # Try the path as given first, then relative to universes_dir.
            load_path = (
                universe_path
                if universe_path.exists()
                else universes_dir / universe_path
            )
            if not load_path.exists():
                raise FileNotFoundError(universe_path)
            logger.info("Loading file {u}", u=load_path)
            parse_result: ParseResult = from_file(load_path)
            universe = parse_result.universe
            universe.rename(name=universe_name)
            filler_path_map[universe_name] = (universe_path, universe)
        cell_filler_map[cell_name] = (universe, transformation)
    return cell_filler_map
from mckit.parser.mcnp_input_sly_parser import Card, Universe, from_file
from mckit.utils import assert_all_paths_exist, get_root_dir
from mckit.utils.logging import logger as LOG

dotenv.load_dotenv(dotenv_path=".env", verbose=True)

# Root folders are configurable through environment variables.
DNFM_ROOT: Path = get_root_dir("DNFM_ROOT", "~/dev/mcnp/dnfm")
CMODEL_ROOT: Path = get_root_dir("CMODEL_ROOT", "~/dev/mcnp/cmodel")
MODEL_DIR: Path = DNFM_ROOT / "models/c-model"

# BUG FIX: this logger uses brace-style placeholders (see the other
# modules: logger.info("Check model {}", ...)); with "%s" the value was
# never substituted and "%s" was printed literally.
LOG.info("DNFM_ROOT={}", DNFM_ROOT)
LOG.info("CMODEL_ROOT={}", CMODEL_ROOT)

assert_all_paths_exist(CMODEL_ROOT, DNFM_ROOT, MODEL_DIR)

NJOBS = os.cpu_count()
# print(f"NJOBS: {NJOBS}")
# set_loky_pickler()

dnfm_box: Universe = from_file(MODEL_DIR / "DNFM_box.i").universe
dnfm: Universe = from_file(MODEL_DIR / "DNFM_NEW_LOC.i").universe
envelopes: Universe = from_file(CMODEL_ROOT / "cmodel.universes/envelopes.i").universe

# Shift surface/cell numbering so the DNFM box cards don't clash with
# the envelopes model.
dnfm_box.rename(start_surf=3300, start_cell=1100)

new_cells: List[Card] = []
box = dnfm_box[0].shape.complement()
for c in envelopes:
    c.options.pop("FILL", None)
    if c.name() in {64, 85, 165}:
        # Carve the DNFM box out of the envelope cells it intersects.
        new_cells.append(c.intersection(box).simplify(min_volume=0.1))
    else:
        new_cells.append(c)
new_cells.append(dnfm_box[0])

new_univ = Universe(new_cells, name_rule="keep")
new_univ.save(MODEL_DIR / "new_env_v1.i")
def decompose(output, fill_descriptor_path, source, override):
    """Split a model into envelope and filler-universe files.

    Saves each filled universe to its own MCNP file, records the fills in
    a TOML fill-descriptor, and writes the remaining envelopes model.

    Args:
        output: output directory (default derived from *source* when None).
        fill_descriptor_path: file name of the TOML descriptor to write.
        source: path to the model to decompose.
        override: allow overwriting existing output files.
    """
    logger = logging.getLogger(__name__)
    logger.debug("Loading model from %s", source)
    source = Path(source)
    output = get_default_output_directory(source) if output is None else Path(output)
    output.mkdir(parents=True, exist_ok=True)
    fill_descriptor = tk.document()
    fill_descriptor.add(tk.comment(f'This is a decomposition of "{source.name}" model'))
    parse_result: ParseResult = from_file(source)
    if parse_result.title:
        fill_descriptor.append("title", parse_result.title)
    model: Universe = parse_result.universe
    if model.comment:
        fill_descriptor.append("comment", model.comment)
    fill_descriptor.append("created", datetime.now())
    fill_descriptor.add(tk.nl())
    saved_universes = set()
    for cell in model:
        fill = cell.options.pop("FILL", None)
        if not fill:
            continue
        universe = fill["universe"]
        transform = fill.get("transform", None)
        # Record the original FILL specification as a comment on the cell.
        words = [f"FILL={universe.name()}"]
        if transform:
            words[0] = "*" + words[0]
            words.append("(")
            words.extend(transform.get_words())
            words.append(")")
        comments = cell.options.get("comment", [])
        comments.append("".join(words))
        cell.options["comment"] = comments
        universe_name = universe.name()
        fn = f"u{universe_name}.i"
        descriptor = tk.table()
        descriptor["universe"] = universe_name
        if transform:
            name = transform.name()
            if name is None:
                # The transformation is anonymous, so store its specification
                # omitting redundant '*', TR0 words, and interleaving spaces.
                descriptor["transform"] = tk.array(transform.mcnp_words()[2:][1::2])
            else:
                descriptor["transform"] = name
        descriptor["file"] = fn
        fill_descriptor.append(str(cell.name()), descriptor)
        fill_descriptor.add(tk.nl())
        if universe_name not in saved_universes:
            # Each universe is written out only once.
            move_universe_attribute_to_comments(universe)
            save_mcnp(universe, output / fn, override)
            logger.debug("The universe %s has been saved to %s", universe_name, fn)
            saved_universes.add(universe_name)
    with open(output / fill_descriptor_path, "w") as fid:
        fid.write(tk.dumps(fill_descriptor))
    envelopes_path = output / "envelopes.i"
    save_mcnp(model, envelopes_path, override)
    logger.debug("The envelopes are saved to %s", envelopes_path)
def compose(output, fill_descriptor_path, source, override):
    """Fill the envelope cells of a model according to a fill descriptor.

    Loads the filler universes named in the descriptor, harmonizes the
    material compositions across them, attaches each universe (and its
    optional transformation) to its envelope cell, then saves the result.

    Args:
        output: destination path for the composed model.
        fill_descriptor_path: path of the TOML fill descriptor.
        source: path of the envelopes model.
        override: allow overwriting an existing output file.
    """
    logger.info("Loading model from {s}", s=source)
    parse_result: ParseResult = from_file(source)
    envelopes = parse_result.universe
    source = Path(source)
    universes_dir = source.absolute().parent
    assert universes_dir.is_dir()
    logger.info("Loading fill-descriptor from {f}", f=fill_descriptor_path)
    with fill_descriptor_path.open() as fid:
        fill_descriptor = tk.parse(fid.read())
    universes = load_universes(fill_descriptor, universes_dir)
    named_transformations = load_named_transformations(fill_descriptor)
    # Collect material compositions from all fillers and share them.
    comps = {key: set(u.get_compositions()) for key, (u, _) in universes.items()}
    common = reduce(set.union, comps.values())
    envelopes.set_common_materials(common)
    cells_index = {cell.name(): cell for cell in envelopes}
    for cell_name, (universe, transformation) in universes.items():
        universe.set_common_materials(common)
        cell = cells_index[cell_name]
        cell.options = filter_dict(cell.options, "original")
        cell.options["FILL"] = {"universe": universe}
        if transformation is None:
            continue
        if isinstance(transformation, tk_items.Array):
            # Anonymous transformation stored as an array of numbers.
            values = np.fromiter(map(float, iter(transformation)), dtype=np.double)
            try:
                translation = values[:3]
                rotation = values[3:] if len(values) > 3 else None
                applied = mk.Transformation(
                    translation=translation,
                    rotation=rotation,
                    # Assuming that on decompose we store a transformation
                    # in degrees as well.
                    indegrees=True,
                )
            except ValueError as ex:
                raise ValueError(
                    f"Failed to process FILL transformation in cell #{cell.name()} of universe #{universe.name()}"
                ) from ex
            cell.options["FILL"]["transform"] = applied
        elif isinstance(transformation, tk_items.Integer):
            assert (
                named_transformations is not None
            ), "There are no named transformations in the fill descriptor file"
            cell.options["FILL"]["transform"] = named_transformations[int(transformation)]
        else:
            raise NotImplementedError(
                f"Unexpected type of transformation parameter {type(transformation)}"
            )
    save_mcnp(envelopes, output, override)
def test_mcnp_parser(parse_file, expected):
    """The title section parsed from the file must match the expectation."""
    resolved = file_resolver(parse_file)
    result: ParseResult = from_file(resolved)
    assert result.sections.title == expected["title"]
def compose(output, fill_descriptor_path, source, override):
    """Fill the envelope cells of a model according to a fill descriptor.

    Loads the filler universes listed in the descriptor, harmonizes material
    compositions, attaches each universe (with its optional anonymous
    transformation) to its envelope cell, then saves the composed model.

    Args:
        output: destination path for the composed model.
        fill_descriptor_path: path of the TOML fill descriptor.
        source: path of the envelopes model.
        override: allow overwriting an existing output file.

    Raises:
        FileNotFoundError: a universe file from the descriptor is missing.
        NotImplementedError: the descriptor uses a named transformation.
    """
    parse_result: ParseResult = from_file(source)
    envelopes = parse_result.universe
    source = Path(source)
    universes_dir = source.absolute().parent
    assert universes_dir.is_dir()
    with fill_descriptor_path.open() as fid:
        fill_descriptor = tk.parse(fid.read())
    universes = {}
    for k, v in fill_descriptor.items():
        if isinstance(v, dict) and 'universe' in v:
            cell_name = int(k)
            universe_name = int(v['universe'])
            transformation = v.get('transform', None)
            universe_path = Path(v['file'])
            # Resolve relative paths against the source model's directory.
            if not universe_path.exists():
                universe_path = universes_dir / universe_path
            if not universe_path.exists():
                raise FileNotFoundError(universe_path)
            parse_result: ParseResult = from_file(universe_path)
            universe: mk.Universe = parse_result.universe
            universe.rename(name=universe_name)
            universes[cell_name] = (universe, transformation)
    # Share material compositions across all filler universes.
    comps = {k: set(u.get_compositions()) for k, (u, _) in universes.items()}
    common = reduce(set.union, comps.values())
    envelopes.set_common_materials(common)
    cells_index = {cell.name(): cell for cell in envelopes}
    for i, (universe, transformation) in universes.items():
        universe.set_common_materials(common)
        cell = cells_index[i]
        cell.options = filter_dict(cell.options, "original")
        cell.options["FILL"] = {"universe": universe}
        if transformation is not None:
            # BUG FIX: `tk.array` is the tomlkit factory *function*, not a
            # class, so `isinstance(transformation, tk.array)` raised
            # TypeError; check against the Array item class instead.
            if isinstance(transformation, tk.items.Array):
                values = np.fromiter(map(float, iter(transformation)), dtype=np.double)
                # BUG FIX: pass rotation=None when only a translation is
                # given; the old code passed an empty slice.
                rotation = values[3:] if len(values) > 3 else None
                cell.options["FILL"]["transform"] = mk.Transformation(
                    translation=values[:3],
                    rotation=rotation,
                    indegrees=True,
                )
            else:
                # TODO dvp: use parse results to implement this: there's an index of transformations
                raise NotImplementedError("""\
Specification of fill with a universe with a named transformation "fill=<...> ( number )" occurs. \
Only anonymous transformations are implemented.\
""")
    save_mcnp(envelopes, output, override)
def decompose(output, fill_descriptor_path, source, override):
    """Split a model into envelope and filler-universe files.

    Saves each filled universe to its own MCNP file, records the fills and
    the model's named transformations in a TOML fill-descriptor, and writes
    the remaining envelopes model.

    Args:
        output: output directory (default derived from *source* when None).
        fill_descriptor_path: file name of the TOML descriptor to write.
        source: path to the model to decompose.
        override: allow overwriting existing output files.
    """
    logger.debug("Loading model from {}", source)
    source = Path(source)
    output = get_default_output_directory(source) if output is None else Path(output)
    output.mkdir(parents=True, exist_ok=True)
    fill_descriptor = tk.document()
    fill_descriptor.add(tk.comment(f'This is a decomposition of "{source.name}" model'))
    parse_result: ParseResult = from_file(source)
    if parse_result.title:
        fill_descriptor.append("title", parse_result.title)
    model: Universe = parse_result.universe
    if model.comment:
        fill_descriptor.append("comment", model.comment)
    named_transformations = list(collect_transformations(model))
    fill_descriptor.append("created", item(datetime.now()))
    fill_descriptor.add(tk.nl())
    saved_universes = set()
    for cell in model:
        fill = cell.options.pop("FILL", None)
        if not fill:
            continue
        universe = fill["universe"]
        transform = fill.get("transform", None)
        # Record the original FILL specification as a comment on the cell.
        words = [f"FILL={universe.name()}"]
        if transform:
            words[0] = "*" + words[0]
            words.append("(")
            words.extend(transform.get_words())
            words.append(")")
        comments = cell.options.get("comment", [])
        comments.append("".join(words))
        cell.options["comment"] = comments
        universe_name = universe.name()
        fn = f"u{universe_name}.i"
        descriptor = tk.table()
        descriptor["universe"] = universe_name
        if transform:
            name = transform.name()
            if name is None:
                # The transformation is anonymous, so store its specification
                # omitting redundant '*', TR0 words, and interleaving spaces.
                descriptor["transform"] = tk.array(transform.mcnp_words()[2:][1::2])
            else:
                descriptor["transform"] = name
        descriptor["file"] = fn
        fill_descriptor.append(str(cell.name()), descriptor)
        fill_descriptor.add(tk.nl())
        if universe_name not in saved_universes:
            # Each universe is written out only once.
            move_universe_attribute_to_comments(universe)
            save_mcnp(universe, output / fn, override)
            logger.debug("The universe {} has been saved to {}", universe_name, fn)
            saved_universes.add(universe_name)
    named_transformations_descriptor = tk.table()
    for t in sorted(named_transformations, key=lambda x: x.name()):
        named_transformations_descriptor[f"tr{t.name()}"] = tk.array(t.mcnp_words()[2:][1::2])
    fill_descriptor.append("named_transformations", named_transformations_descriptor)
    fill_descriptor.add(tk.nl())
    fdp = output / fill_descriptor_path
    with open(fdp, "w") as fid:
        fid.write(tk.dumps(fill_descriptor))
    logger.debug("Fill descriptor is saved in {}", fdp)
    envelopes_path = output / "envelopes.i"
    save_mcnp(model, envelopes_path, override)
    logger.debug("The envelopes are saved to {}", envelopes_path)