def create_docker_image(filenames, MPI_version=None):
    log.info("Creating Docker image...")
    if not MPI_version:
        MPI_version = "3.2"
        # log.warning("You have not specified an MPICH version. "
        #             "It is strongly encouraged to request the one installed in your cluster,"
        #             " using '--mpi-version X.Y'. Defaulting to MPICH v%s.", MPI_version)
    dc = get_docker_client()
    modules = yaml_dump(get_used_modules(*[load_input(f) for f in filenames])).strip()
    echos_reqs = "RUN " + " && \\ \n    ".join(
        [r'echo "%s" >> %s' % (block, requirements_file_path)
         for block in modules.split("\n")])
    echos_help = "RUN " + " && \\ \n    ".join(
        [r'echo "%s" >> %s' % (line, help_file_path)
         for line in image_help("docker").split("\n")])
    recipe = r"""
    FROM cobaya/base_mpich_%s:latest
    %s
    RUN cobaya-install %s --%s %s --just-code --force ### NEEDS PYTHON UPDATE! --no-progress-bars
    %s
    CMD ["cat", "%s"]
    """ % (MPI_version, echos_reqs, requirements_file_path,
           _modules_path_arg, _modules_path, echos_help, help_file_path)
    image_name = "cobaya:" + uuid.uuid4().hex[:6]
    with StringIO(recipe) as stream:
        dc.images.build(fileobj=stream, tag=image_name)
    log.info("Docker image '%s' created! "
             "Do 'docker save %s | gzip > some_name.tar.gz' "
             "to save it to the current folder.", image_name, image_name)
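
# Illustrative usage sketch (not part of the original module): how the Docker builder
# above might be driven from a script. The input file name "my_input.yaml" and the
# MPICH version are placeholders, not values taken from this codebase.
def _example_create_docker_image():
    # Requires a running Docker daemon reachable by get_docker_client().
    create_docker_image(["my_input.yaml"], MPI_version="3.2")
    # The resulting image is tagged 'cobaya:<6-hex-chars>'; it can then be exported with
    #   docker save cobaya:<hash> | gzip > cobaya_image.tar.gz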
def create_singularity_image(filenames, MPI_version=None):
    log.info("Creating Singularity image...")
    if not MPI_version:
        MPI_version = "2.1.1"
        # log.warning("You have not specified an OpenMPI version. "
        #             "It is strongly encouraged to request the one installed in your cluster,"
        #             " using '--mpi-version X.Y.Z'. Defaulting to OpenMPI v%s.", MPI_version)
    modules = yaml_dump(get_used_modules(*[load_input(f) for f in filenames])).strip()
    echos_reqs = "\n    " + "\n    ".join(
        [""] + ['echo "%s" >> %s' % (block, requirements_file_path)
                for block in modules.split("\n")])
    recipe = (
        dedent("""
        Bootstrap: docker
        From: cobaya/base_openmpi_%s:latest\n
        %%post\n""" % MPI_version) +
        dedent(echos_reqs) +
        dedent("""
        export CONTAINED=TRUE
        cobaya-install %s --%s %s --just-code --force ### --no-progress-bars
        mkdir %s
        %%help
        %s
        """ % (requirements_file_path, _modules_path_arg, _modules_path,
               os.path.join(_modules_path, _data),
               "\n    ".join(image_help("singularity").split("\n")[1:]))))
    with NamedTemporaryFile(delete=False) as recipe_file:
        # NamedTemporaryFile is opened in binary mode by default
        recipe_file.write(recipe.encode("utf-8"))
        recipe_file_name = recipe_file.name
    image_name = "cobaya_" + uuid.uuid4().hex[:6] + ".simg"
    process_build = Popen(["singularity", "build", image_name, recipe_file_name],
                          stdout=PIPE, stderr=PIPE)
    out, err = process_build.communicate()
    if process_build.returncode:
        log.info(out)
        log.info(err)
        raise LoggedError(log, "Image creation failed! See error message above.")
    log.info("Singularity image '%s' created!", image_name)
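
# Illustrative usage sketch (not part of the original module): building a Singularity
# image for a cluster. The file name and OpenMPI version are placeholders; the
# 'singularity' executable must be on the PATH, as assumed by the Popen call above.
def _example_create_singularity_image():
    create_singularity_image(["my_input.yaml"], MPI_version="2.1.1")
    # Produces 'cobaya_<6-hex-chars>.simg' in the current working directory.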
def install(*infos, **kwargs):
    if not log.root.handlers:
        logger_setup()
    path = kwargs.get("path", ".")
    if not path:
        # See if we can get one (and only one) from infos
        paths = set([p for p in [info.get(_path_install) for info in infos] if p])
        if len(paths) == 1:
            path = paths.pop()
        else:
            raise LoggedError(
                log, "No 'path' argument given and could not extract one (and only one) "
                     "from the infos.")
    abspath = os.path.abspath(path)
    log.info("Installing modules at '%s'\n", abspath)
    kwargs_install = {"force": kwargs.get("force", False),
                      "no_progress_bars": kwargs.get("no_progress_bars")}
    for what in (_code, _data):
        kwargs_install[what] = kwargs.get(what, True)
        spath = os.path.join(abspath, what)
        if kwargs_install[what] and not os.path.exists(spath):
            try:
                os.makedirs(spath)
            except OSError:
                raise LoggedError(
                    log, "Could not create the desired installation folder '%s'", spath)
    failed_modules = []
    skip_list = os.environ.get("COBAYA_TEST_SKIP", "").replace(",", " ").lower().split()
    for kind, modules in get_used_modules(*infos).items():
        for module in modules:
            print(create_banner(kind + ":" + module, symbol="=", length=80))
            if len([s for s in skip_list if s in module.lower()]):
                log.info("Skipping %s for test skip list %s" % (module, skip_list))
                continue
            module_folder = get_class_module(module, kind)
            try:
                imported_module = import_module(module_folder, package=_package)
                imported_class = get_class(module, kind)
                if len([s for s in skip_list
                        if s in imported_class.__name__.lower()]):
                    log.info("Skipping %s for test skip list %s"
                             % (imported_class.__name__, skip_list))
                    continue
            except ImportError as e:
                if kind == _likelihood:
                    info = (next(info for info in infos
                                 if module in info.get(_likelihood, {}))
                            [_likelihood][module]) or {}
                    if isinstance(info, string_types) or _external in info:
                        log.warning("Module '%s' is a custom likelihood. "
                                    "Nothing to do.\n", module)
                    else:
                        log.error("Module '%s' not recognized. [%s]\n" % (module, e))
                        failed_modules += ["%s:%s" % (kind, module)]
                continue
            is_installed = getattr(
                imported_class, "is_installed",
                getattr(imported_module, "is_installed", None))
            if is_installed is None:
                log.info("Built-in module: nothing to do.\n")
                continue
            if is_installed(path=abspath, **kwargs_install):
                log.info("External module already installed.\n")
                if kwargs.get("just_check", False):
                    continue
                if kwargs_install["force"]:
                    log.info("Forcing re-installation, as requested.")
                else:
                    log.info("Doing nothing.\n")
                    continue
            else:
                if kwargs.get("just_check", False):
                    log.info("NOT INSTALLED!\n")
                    continue
            try:
                install_this = getattr(imported_class, "install",
                                       getattr(imported_module, "install", None))
                success = install_this(path=abspath, **kwargs_install)
            except Exception:
                traceback.print_exception(*sys.exc_info(), file=sys.stdout)
                log.error("An unknown error occurred. Delete the modules folder and try "
                          "again. Notify the developers if this error persists.")
                success = False
            if success:
                log.info("Successfully installed!\n")
            else:
                log.error("Installation failed! Look at the error messages above. "
                          "Solve them and try again, or, if you are unable to solve, "
                          "install this module manually.")
                failed_modules += ["%s:%s" % (kind, module)]
                continue
            # test installation
            if not is_installed(path=abspath, **kwargs_install):
                log.error("Installation apparently worked, "
                          "but the subsequent installation test failed! "
                          "Look at the error messages above. "
                          "Solve them and try again, or, if you are unable to solve, "
                          "install this module manually.")
                failed_modules += ["%s:%s" % (kind, module)]
    if failed_modules:
        bullet = "\n - "
        raise LoggedError(
            log, "The installation (or installation test) of some module(s) has failed: "
                 "%s\nCheck output of the installer of each module above "
                 "for precise error info.\n",
            bullet + bullet.join(failed_modules))
def makeGrid(batchPath, settingName=None, settings=None, read_only=False,
             interactive=False, install_reqs_at=None, install_reqs_force=None):
    print("Generating grid...")
    batchPath = os.path.abspath(batchPath) + os.sep
    if not settings:
        if not settingName:
            raise NotImplementedError("Re-using previous batch is work in progress...")
            # if not pathIsGrid(batchPath):
            #     raise Exception('Need to give name of setting file if batchPath/config '
            #                     'does not exist')
            # read_only = True
            # sys.path.insert(0, batchPath + 'config')
            # sys.modules['batchJob'] = batchjob  # old name
            # settings = __import__(IniFile(batchPath + 'config/config.ini')
            #                       .params['setting_file'].replace('.py', ''))
        elif os.path.splitext(settingName)[-1].lower() in _yaml_extensions:
            settings = yaml_load_file(settingName)
        else:
            raise NotImplementedError("Using a python script is work in progress...")
            # In this case, info-as-dict would be passed
            # settings = __import__(settingName, fromlist=['dummy'])
    batch = batchjob.batchJob(batchPath, settings.get("yaml_dir", None))
    # batch.skip = settings.get("skip", False)
    batch.makeItems(settings, messages=not read_only)
    if read_only:
        for jobItem in [b for b in batch.jobItems]:
            if not jobItem.chainExists():
                batch.jobItems.remove(jobItem)
        batch.save()
        print('OK, configured grid with %u existing chains' % len(batch.jobItems))
        return batch
    else:
        batch.makeDirectories(setting_file=None)
        batch.save()
    infos = {}
    modules_used = {}
    # Default info
    defaults = copy.deepcopy(settings)
    grid_definition = defaults.pop("grid")
    models_definitions = grid_definition["models"]
    datasets_definitions = grid_definition["datasets"]
    for jobItem in batch.items(wantSubItems=False):
        # Model info
        jobItem.makeChainPath()
        try:
            model_info = copy.deepcopy(models_definitions[jobItem.param_set] or {})
        except KeyError:
            raise ValueError("Model '%s' must be defined." % jobItem.param_set)
        model_info = merge_info(defaults, model_info)
        # Dataset info
        try:
            dataset_info = copy.deepcopy(datasets_definitions[jobItem.data_set.tag])
        except KeyError:
            raise ValueError("Data set '%s' must be defined." % jobItem.data_set.tag)
        # Combined info
        combined_info = merge_info(defaults, model_info, dataset_info)
        if "preset" in combined_info:
            preset = combined_info.pop("preset")
            combined_info = merge_info(create_input(**preset), combined_info)
        combined_info[_output_prefix] = jobItem.chainRoot
        # Requisites
        modules_used = get_used_modules(modules_used, combined_info)
        if install_reqs_at:
            combined_info[_path_install] = os.path.abspath(install_reqs_at)
        # Save the info (we will write it after installation:
        # we need to install to add auto covmats)
        if jobItem.param_set not in infos:
            infos[jobItem.param_set] = {}
        infos[jobItem.param_set][jobItem.data_set.tag] = combined_info
    # Installing requisites
    if install_reqs_at:
        print("Installing required code and data for the grid.")
        from cobaya.log import logger_setup
        logger_setup()
        install_reqs(modules_used, path=install_reqs_at, force=install_reqs_force)
    print("Adding covmats (if necessary) and writing input files")
    for jobItem in batch.items(wantSubItems=False):
        info = infos[jobItem.param_set][jobItem.data_set.tag]
        # Covariance matrices
        # We try to find them now, instead of at run time, to check if correctly selected
        try:
            sampler = list(info[_sampler])[0]
        except KeyError:
            raise ValueError("No sampler has been chosen")
        if sampler == "mcmc" and info[_sampler][sampler].get("covmat", "auto"):
            modules_path = install_reqs_at or info.get(_path_install, None)
            if not modules_path:
                raise ValueError("Cannot assign automatic covariance matrices because "
                                 "no modules path has been defined.")
            # Need updated info for covmats: includes renames
            updated_info = update_info(info)
            # Ideally, we use slow+sampled parameters to look for the covariance matrix,
            # but since for that we'd need to initialise a model, we approximate that set
            # as theory+sampled
            from itertools import chain
            like_params = set(chain(*[list(like[_params])
                                      for like in updated_info[_likelihood].values()]))
            params_info = {p: v for p, v in updated_info[_params].items()
                           if is_sampled_param(v) and p not in like_params}
            best_covmat = get_best_covmat(
                os.path.abspath(modules_path), params_info, updated_info[_likelihood])
            info[_sampler][sampler]["covmat"] = os.path.join(
                best_covmat["folder"], best_covmat["name"])
        # Write the info for this job
        try:
            yaml_dump_file(jobItem.iniFile(), info, error_if_exists=True)
        except IOError:
            raise IOError("Can't write chain input file. Maybe the chain configuration "
                          "files already exist?")
    # Non-translated old code
    # if not start_at_bestfit:
    #     setMinimize(jobItem, ini)
    #     variant = '_minimize'
    #     ini.saveFile(jobItem.iniFile(variant))
    # ## NOT IMPLEMENTED: start at best fit
    # ##   ini.params['start_at_bestfit'] = start_at_bestfit
    # ---
    # for deffile in settings.defaults:
    #     ini.defaults.append(batch.commonPath + deffile)
    # if hasattr(settings, 'override_defaults'):
    #     ini.defaults = [batch.commonPath + deffile
    #                     for deffile in settings.override_defaults] + ini.defaults
    # ---
    # # add ini files for importance sampling runs
    # for imp in jobItem.importanceJobs():
    #     if getattr(imp, 'importanceFilter', None): continue
    #     if batch.hasName(imp.name.replace('_post', '')):
    #         raise Exception('importance sampling something you already have?')
    #     for minimize in (False, True):
    #         if minimize and not getattr(imp, 'want_minimize', True): continue
    #         ini = IniFile()
    #         updateIniParams(ini, imp.importanceSettings, batch.commonPath)
    #         if cosmomcAction == 0 and not minimize:
    #             for deffile in settings.importanceDefaults:
    #                 ini.defaults.append(batch.commonPath + deffile)
    #             ini.params['redo_outroot'] = imp.chainRoot
    #             ini.params['action'] = 1
    #         else:
    #             ini.params['file_root'] = imp.chainRoot
    #         if minimize:
    #             setMinimize(jobItem, ini)
    #             variant = '_minimize'
    #         else:
    #             variant = ''
    #         ini.defaults.append(jobItem.iniFile())
    #         ini.saveFile(imp.iniFile(variant))
    #     if cosmomcAction != 0: break
    if not interactive:
        return batch
    print('Done... to run do: cobaya-grid-run %s' % batchPath)
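
# Illustrative usage sketch (not part of the original module): generating a grid of
# chains from a YAML settings file. Both paths and the file name are placeholders;
# the settings file is assumed to define the 'grid' block (with 'models' and
# 'datasets') that makeGrid() reads above.
def _example_make_grid():
    makeGrid("/path/to/grid_batch", settingName="grid_settings.yaml",
             install_reqs_at="/path/to/modules", interactive=True)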
def get_bib_info(*infos):
    blocks_text = odict([["Cobaya", "[Paper in preparation]"]])
    for kind, modules in get_used_modules(*infos).items():
        for module in modules:
            blocks_text["%s:%s" % (kind, module)] = get_bib_module(module, kind)
    return blocks_text
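
# Illustrative usage sketch (not part of the original module): printing the bibliography
# blocks collected for an input file. The file name is a placeholder.
def _example_print_bib():
    bib = get_bib_info(load_input("my_input.yaml"))
    for block, text in bib.items():
        print("#", block)
        print(text)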