# Validate an MCNP model: parse it and report duplicated names among
# universes, cells, surfaces, transformations and compositions.
# Returns the accumulated status code (0 when parsing succeeds).
def check(source):
    status = 0
    logger.info("Check model {}", source)
    parse_result: ParseResult = from_file(Path(source))
    model = parse_result.universe
    logger.debug("Read the model okay")
    universes = model.get_universes()
    check_duplicates(universes, "universe", Universe.name)
    # Gather every cell, surface, transformation and composition
    # across all universes of the model.
    cells = []
    surfaces = []
    transformations = []
    compositions = []
    for universe in universes:
        cells.extend(universe)
        surfaces.extend(universe.get_surfaces(inner=False))
        extra_transformations = collect_transformations(universe)
        if extra_transformations:
            transformations.extend(extra_transformations)
        compositions.extend(universe.get_compositions())
    check_duplicates(cells, "cell", Card.name)
    check_duplicates(surfaces, "surface", Card.name)
    # Empty collections are reported as None, matching the original contract.
    check_duplicates(transformations or None, "transformation", Card.name)
    check_duplicates(compositions or None, "composition", Card.name)
    return status
def transform(
    output: Path,
    transformation: str,
    transformations: Path,
    source: Path,
    override: bool,
) -> None:
    """Apply a numbered transformation to an MCNP model and save the result.

    Args:
        output: destination file for the transformed model.
        transformation: number (as text) of the transformation to apply.
        transformations: file containing the transformation cards.
        source: path of the model to transform.
        override: allow overwriting an existing output file.

    Raises:
        FileExistsError: when `output` exists and `override` is not set.
        ValueError: when the requested transformation is absent from the file.
    """
    logger.info("Transforming model from {s}", s=source)
    if output.exists() and not override:
        raise FileExistsError(
            f"File {output} already exists. Remove it or use --override option"
        )
    parse_result: ParseResult = from_file(source)
    src: Universe = parse_result.universe
    wanted = int(transformation)
    logger.debug("Loading transformations from {}", transformations)
    # Parse every card found in the transformations file.
    cards = clean_mcnp_cards(split_to_cards(transformations.read_text()))
    parsed = [parse_transformation(card.text) for card in cards]
    index = IndexOfNamed.from_iterable(
        parsed,
        on_duplicate=raise_on_duplicate_strategy,
    )
    if wanted not in index:
        raise ValueError(f"Transformation {wanted} is not found in {transformations}")
    save_mcnp(src.transform(index[wanted]), output, override)
def split(output, source, separators):
    """Splits MCNP model to text portions (opposite to concat)"""
    # Fall back to a default directory derived from the source name.
    target = (
        get_default_output_directory(source, ".split")
        if output is None
        else Path(output)
    )
    target.mkdir(parents=True, exist_ok=True)
    logger.info('Splitting "{source}" to directory "{output}"', source=source, output=target)
    return do_split(target, source, context["OVERRIDE"], separators)
def mckit(verbose: bool, quiet: bool, logfile: bool, profile_mem: bool, override: bool) -> None:
    # Top-level CLI group callback: adjusts logging verbosity and stores the
    # global OVERRIDE flag for subcommands.
    # NOTE(review): if `logger` is loguru, `logger.level(name)` *looks up* a
    # level definition rather than setting a minimum threshold — confirm these
    # two calls actually change verbosity.
    if quiet:
        logger.level("WARNING")
    if verbose:
        logger.level("TRACE")
    logger.info("Running {}", NAME)
    logger.debug("Working dir {}", Path(".").absolute())
    #
    # TODO dvp: add customized logger configuring from a configuration toml-file.
    # ensure that ctx.obj exists and is a dict (in case `cli()` is called
    # by means other than the `if` block below
    # obj = ctx.ensure_object(dict)
    # obj["DEBUG"] = debug
    # Share the override policy with subcommands via the module-level context.
    context["OVERRIDE"] = override
def transform(
    output: click.STRING,
    transformation: click.STRING,
    transformations: click.Path,
    source: click.Path,
) -> None:
    """Transform MCNP model(s) with one of specified transformations."""
    # Fix: the docstring (shown as CLI help by click) had the typo
    # "transformatio." — corrected to "transformations.".
    # Delegate to the implementation; OVERRIDE policy comes from the CLI group.
    do_transform(
        Path(output),
        transformation,
        Path(str(transformations)),
        Path(str(source)),
        context["OVERRIDE"],
    )
    logger.info("File {} is transformed to {}", source, output)
def compose(output, fill_descriptor, source):
    """Merge universes and envelopes into MCNP model using merge descriptor"""
    # Default descriptor lives next to the source model.
    fill_descriptor = (
        Path(source).absolute().parent / "fill-descriptor.toml"
        if fill_descriptor is None
        else Path(fill_descriptor)
    )
    if not fill_descriptor.exists():
        raise click.UsageError(f'Cannot find fill descriptor file "{fill_descriptor}"')
    logger.info(
        'Composing "{output}", from envelopes "{source}" with fill descriptor "{fill_descriptor}"',
        output=output,
        source=source,
        fill_descriptor=fill_descriptor,
    )
    return do_compose(output, fill_descriptor, source, context["OVERRIDE"])
def load_universes(
    fill_descriptor, universes_dir
) -> Dict[int, Tuple[mk.Universe, Union[int, List[float]]]]:
    """Load the filler universes referenced by a fill descriptor.

    Args:
        fill_descriptor: parsed descriptor mapping; entries that are dicts with
            a "universe" key describe a cell to fill (keys "universe", "file",
            optional "transform").
        universes_dir: directory used to resolve relative universe file paths.

    Returns:
        Mapping of cell name to (loaded universe, transform spec), where the
        transform spec is whatever "transform" held in the descriptor (or None).

    Raises:
        ValueError: when the same filler number points to two different files.
        FileNotFoundError: when a referenced universe file cannot be found.
    """
    # Cache of already-loaded fillers: universe name -> (path, universe).
    filler_path_map: Dict[int, Tuple[Path, mk.Universe]] = dict()
    cell_filler_map: Dict[int, Tuple[mk.Universe, Union[int, List[float]]]] = dict()
    for k, v in fill_descriptor.items():
        if isinstance(v, dict) and "universe" in v:
            cell_name = int(k)
            universe_name = int(v["universe"])
            transformation = v.get("transform", None)
            universe_path = Path(v["file"])
            if universe_name in filler_path_map:
                # Reuse the cached universe; a second entry must agree on the path.
                prev_path, prev_universe = filler_path_map[universe_name]
                if prev_path == universe_path:
                    universe = prev_universe
                else:
                    raise ValueError(f'\n \
Filler number {universe_name} specifies different paths to load \n \
"{prev_path}" and "{universe_path}"')
            else:
                # Try the path as given, then relative to universes_dir.
                if universe_path.exists():
                    load_path = universe_path
                else:
                    load_path = universes_dir / universe_path
                if not load_path.exists():
                    raise FileNotFoundError(universe_path)
                logger.info("Loading file {u}", u=load_path)
                parse_result: ParseResult = from_file(load_path)
                universe = parse_result.universe
                universe.rename(name=universe_name)
                filler_path_map[universe_name] = (universe_path, universe)
            cell_filler_map[cell_name] = (universe, transformation)
    return cell_filler_map
import os
from pathlib import Path
from typing import List

import dotenv

from mckit.parser.mcnp_input_sly_parser import Card, Universe, from_file
from mckit.utils import assert_all_paths_exist, get_root_dir
from mckit.utils.logging import logger as LOG

# Pick up local environment overrides (DNFM_ROOT, CMODEL_ROOT) from .env.
dotenv.load_dotenv(dotenv_path=".env", verbose=True)
DNFM_ROOT: Path = get_root_dir("DNFM_ROOT", "~/dev/mcnp/dnfm")
CMODEL_ROOT: Path = get_root_dir("CMODEL_ROOT", "~/dev/mcnp/cmodel")
MODEL_DIR: Path = DNFM_ROOT / "models/c-model"
# NOTE(review): if LOG is loguru, it expects "{}"-style placeholders; "%s" may
# be printed literally here — confirm against mckit.utils.logging.
LOG.info("DNFM_ROOT=%s", DNFM_ROOT)
LOG.info("CMODEL_ROOT=%s", CMODEL_ROOT)
assert_all_paths_exist(CMODEL_ROOT, DNFM_ROOT, MODEL_DIR)
NJOBS = os.cpu_count()
# print(f"NJOBS: {NJOBS}")
# set_loky_pickler()
# Load the DNFM box, the DNFM model itself, and the c-model envelopes.
dnfm_box: Universe = from_file(MODEL_DIR / "DNFM_box.i").universe
dnfm: Universe = from_file(MODEL_DIR / "DNFM_NEW_LOC.i").universe
envelopes: Universe = from_file(CMODEL_ROOT / "cmodel.universes/envelopes.i").universe
# Renumber the box entities to avoid clashes with the envelopes model.
dnfm_box.rename(start_surf=3300, start_cell=1100)
new_cells: List[Card] = []
box = dnfm_box[0].shape.complement()
for c in envelopes:
    # Drop any existing FILL option on every envelope cell.
    c.options.pop("FILL", None)
    # Carve the DNFM box out of the selected envelope cells.
    if c.name() in {64, 85, 165}:
        new_cells.append(c.intersection(box).simplify(min_volume=0.1))
def compose(output, fill_descriptor_path, source, override):
    """Fill envelope cells of the *source* model with universes per a fill descriptor.

    Args:
        output: path to write the composed MCNP model to.
        fill_descriptor_path: descriptor file (tomlkit-parsed) mapping envelope
            cells to filler universes and optional transformations.
        source: path of the envelopes model.
        override: allow overwriting an existing output file.

    Raises:
        ValueError: when a FILL transformation array cannot be converted.
        NotImplementedError: on an unsupported transformation value type.
    """
    logger.info("Loading model from {s}", s=source)
    parse_result: ParseResult = from_file(source)
    envelopes = parse_result.universe
    source = Path(source)
    # Universe files referenced by the descriptor are resolved relative
    # to the directory of the source model.
    universes_dir = source.absolute().parent
    assert universes_dir.is_dir()
    logger.info("Loading fill-descriptor from {f}", f=fill_descriptor_path)
    with fill_descriptor_path.open() as fid:
        fill_descriptor = tk.parse(fid.read())
    universes = load_universes(fill_descriptor, universes_dir)
    named_transformations = load_named_transformations(fill_descriptor)
    # Collect the compositions of every filler universe...
    comps = {}
    for k, v in universes.items():
        u, _ = v
        cps = u.get_compositions()
        comps[k] = {c for c in cps}
    # ...and share their union across envelopes and fillers so material
    # numbering is consistent in the composed model.
    common = reduce(set.union, comps.values())
    envelopes.set_common_materials(common)
    cells_index = dict((cell.name(), cell) for cell in envelopes)
    for i, spec in universes.items():
        universe, transformation = spec
        universe.set_common_materials(common)
        cell = cells_index[i]
        cell.options = filter_dict(cell.options, "original")
        cell.options["FILL"] = {"universe": universe}
        if transformation is not None:
            if isinstance(transformation, tk_items.Array):
                # Inline transformation: 3 translation values, optionally
                # followed by 9 rotation values.
                transformation1 = np.fromiter(map(float, iter(transformation)), dtype=np.double)
                try:
                    translation = transformation1[:3]
                    if len(transformation1) > 3:
                        rotation = transformation1[3:]
                    else:
                        rotation = None
                    transformation2 = mk.Transformation(
                        translation=translation,
                        rotation=rotation,
                        indegrees=True,  # Assuming that on decompose we store a transformation in degrees as well
                    )
                except ValueError as ex:
                    raise ValueError(
                        f"Failed to process FILL transformation in cell #{cell.name()} of universe #{universe.name()}"
                    ) from ex
                cell.options["FILL"]["transform"] = transformation2
            elif isinstance(transformation, tk_items.Integer):
                # Named transformation: look it up by number in the descriptor.
                assert (
                    named_transformations is not None
                ), "There are no named transformations in the fill descriptor file"
                transformation1 = named_transformations[int(transformation)]
                cell.options["FILL"]["transform"] = transformation1
            else:
                raise NotImplementedError(
                    f"Unexpected type of transformation parameter {type(transformation)}"
                )
    save_mcnp(envelopes, output, override)
def decompose(output, fill_descriptor, source):
    """Separate an MCNP model to envelopes and filling universes"""
    # Consistency fix: use the lazy "{}"-placeholder logging style used by
    # every other command in this module instead of an eagerly-built f-string;
    # the rendered message is unchanged.
    logger.info("Processing {}", source)
    return do_decompose(output, fill_descriptor, source, context["OVERRIDE"])
def main():
    """Integrate the antenna envelope and filler universes into the c-model envelopes.

    Side effects: reads models under HFSR_ROOT/CMODEL_ROOT, writes
    "envelopes+antenna-envelop.i" and "ewfa_3.i" to the working directory.
    """
    # new_cells.extend(b_model)
    LOG.info("Loading antenna envelop")
    antenna_envelop = load_model(str(HFSR_ROOT / "models/antenna/box.i"))
    LOG.info("Attaching bounding boxes to antenna envelop")
    attach_bounding_boxes(
        antenna_envelop,
        tolerance=5.0,
        chunksize=max(len(antenna_envelop) // os.cpu_count(), 1),
    )
    LOG.info("Loading c-model envelopes")
    envelopes = load_model(
        str(CMODEL_ROOT / "simple_cubes.universes/envelopes.i"))
    # Envelope cells to be filled; indexes are 0-based positions in the model.
    cells_to_fill = [11, 14, 75]
    cells_to_fill_indexes = [c - 1 for c in cells_to_fill]
    LOG.info("Attaching bounding boxes to c-model envelopes %s", cells_to_fill)
    attach_bounding_boxes([envelopes[i] for i in cells_to_fill_indexes],
                          tolerance=5.0,
                          chunksize=1)
    # attach_bounding_boxes((envelopes), tolerance=10.0, chunksize=5)
    LOG.info("Backing up original envelopes")
    envelopes_original = envelopes.copy()
    # Renumber antenna entities so they don't clash with the envelopes model.
    antenna_envelop.rename(start_cell=200000, start_surf=200000)
    LOG.info("Subtracting antenna envelop from c-model envelopes %s", cells_to_fill)
    envelopes = subtract_model_from_model(
        envelopes, antenna_envelop, cells_filter=lambda c: c in cells_to_fill)
    LOG.info("Adding antenna envelop to c-model envelopes")
    envelopes.add_cells(antenna_envelop, name_rule="clash")
    envelopes_path = "envelopes+antenna-envelop.i"
    envelopes.save(envelopes_path)
    LOG.info("The envelopes are saved to %s", envelopes_path)
    # def load_subtracted_universe(universe_name):
    #     new_universe_path = Path(f"u{universe_name}-ae.i")
    #     if new_universe_path.exists():
    #         LOG.info(f"Loading filler {universe_name}")
    #         universe = load_model(new_universe_path)
    #     else:
    #         st = time.time()
    #         universe_path = universes_dir / f"u{universe_name}.i"
    #         LOG.info("Subtracting antenna envelope from the original filler %s", universe_name)
    #         universe: mk.Universe = mk.read_mcnp(universe_path, encoding="cp1251")
    #         LOG.info("Size %d", len(universe))
    #         attach_bounding_boxes(
    #             universe,
    #             tolerance=100.0,
    #             chunksize=max(len(universe) // os.cpu_count(), 1),
    #         )
    #         et = time.time()
    #         LOG.info(f"Elapsed time on attaching bounding boxes: %.2f min", (et - st)/60)
    #         st = time.time()
    #         universe = subtract_model_from_model(universe, antenna_envelop)
    #         et = time.time()
    #         LOG.info(f"Elapsed time on subtracting filler %d, : %.2f min", universe_name, (et - st)/60)
    #         for c in universe._cells:
    #             del c.options['comment']
    #         universe.rename(name=universe_name)
    #         universe.save(str(new_universe_path))
    #         LOG.info("Universe %d is saved to %s", universe_name, new_universe_path)
    #     return universe
    #
    #
    # universes = list(map(load_subtracted_universe, cells_to_fill))
    universes = list(map(load_filler, cells_to_fill))
    antenna = load_model(HFSR_ROOT / "models/antenna/antenna.i")
    antenna.rename(210000, 210000, 210000, 210000, name=210)
    # Attach each loaded filler to its envelope cell.
    for i, filler in zip(cells_to_fill_indexes, universes):
        envelopes[i].options["FILL"] = {"universe": filler}
    # The antenna-envelope cells were appended last; fill them with the antenna.
    added_cells = len(antenna_envelop)
    for c in envelopes[-added_cells:]:
        c.options["FILL"] = {"universe": antenna}
    set_common_materials(envelopes)
    # def delete_subtracted_universe(universe_name):
    #     new_universe_path = Path(f"u{universe_name}-ae.i")
    #     new_universe_path.unlink()
    # foreach(delete_subtracted_universe, cells_to_fill)
    envelopes_surrounding_and_antenna_file = "ewfa_3.i"
    envelopes.save(envelopes_surrounding_and_antenna_file)
    LOG.info(
        'c-model envelopes integrated with universes and antenna is saved to "%s"',
        envelopes_surrounding_and_antenna_file,
    )
def load_filler(universe_name):
    """Load filler universe ``u<universe_name>.i`` from the module-level
    ``universes_dir`` and rename it to *universe_name*."""
    path = universes_dir / f"u{universe_name}.i"
    LOG.info(f"Loading filler {universe_name}")
    filler = load_model(path)
    filler.rename(name=universe_name)
    return filler
import mckit as mk
from mckit.box import Box
from mckit.utils.logging import logger as LOG


def select_from(cell: mk.Body, to_select: np.ndarray) -> bool:
    """Return True when the cell's name occurs in *to_select*.

    NOTE(review): uses ``searchsorted`` — assumes ``to_select`` is sorted in
    ascending order; verify at call sites.
    """
    name: int = cell.name()
    index: int = to_select.searchsorted(name)
    return index < to_select.size and to_select[index] == name


# Pick up local environment overrides (HFSR_ROOT, CMODEL_ROOT) from .env.
dotenv.load_dotenv(dotenv_path=".env", verbose=True)
HFSR_ROOT = get_root_dir("HFSR_ROOT", "~/dev/mcnp/hfsr")
CMODEL_ROOT = get_root_dir("CMODEL_ROOT", "~/dev/mcnp/c-model")
# NOTE(review): if LOG is loguru, "{}"-style placeholders are expected;
# "%s" may be printed literally — confirm.
LOG.info("HFSR_ROOT=%s", HFSR_ROOT)
LOG.info("CMODEL_ROOT=%s", CMODEL_ROOT)
assert_all_paths_exist(HFSR_ROOT, CMODEL_ROOT)
universes_dir = CMODEL_ROOT / "simple_cubes.universes"
# assert universes_dir.is_dir()
NJOBS = os.cpu_count()
# print(f"NJOBS: {NJOBS}")
# set_loky_pickler()


class BoundingBoxAdder(object):
    """Callable that computes a cell's bounding box with a fixed tolerance.

    NOTE(review): the class definition continues beyond this chunk.
    """

    def __init__(self, tolerance: float):
        # Tolerance forwarded to Shape.bounding_box.
        self.tolerance = tolerance

    def __call__(self, cell: mk.Body):
        box = cell.shape.bounding_box(tol=self.tolerance)