def main():
    """Run MOSFiT."""
    prt = Printer(
        wrap_length=100, quiet=False, language='en', exit_on_prompt=False)

    parser = get_parser(only='language')
    args, remaining = parser.parse_known_args()

    if args.language == 'en':
        loc = locale.getlocale()
        if loc[0]:
            args.language = loc[0].split('_')[0]

    if args.language != 'en':
        try:
            from googletrans.constants import LANGUAGES
        except Exception:
            raise RuntimeError(
                '`--language` requires `googletrans` package, '
                'install with `pip install googletrans`.')
        if args.language == 'select' or args.language not in LANGUAGES:
            languages = list(sorted([
                LANGUAGES[x].title().replace('_', ' ') + ' (' + x + ')'
                for x in LANGUAGES]))
            sel = prt.prompt(
                'Select a language:', kind='select', options=languages,
                message=False)
            args.language = sel.split('(')[-1].strip(')')

    prt = Printer(language=args.language)

    language = args.language
    parser = get_parser(printer=prt)
    args = parser.parse_args()
    args.language = language

    prt = Printer(
        wrap_length=100, quiet=args.quiet, language=args.language,
        exit_on_prompt=args.exit_on_prompt)

    if args.version:
        print('MOSFiT v{}'.format(__version__))
        return

    dir_path = os.path.dirname(os.path.realpath(__file__))

    if args.speak:
        speak('Mosfit', args.speak)

    args.start_time = time.time()

    if args.limiting_magnitude == []:
        args.limiting_magnitude = 20.0

    args.return_fits = False

    if (isinstance(args.extrapolate_time, list) and
            len(args.extrapolate_time) == 0):
        args.extrapolate_time = 100.0

    if len(args.band_list) and args.smooth_times == -1:
        prt.message('enabling_s')
        args.smooth_times = 0

    args.method = 'nester' if args.method.lower() in [
        'nest', 'nested', 'nested_sampler', 'nester'] else 'ensembler'

    if is_master():
        if args.method == 'nester':
            unused_args = [
                [args.burn, '-b'],
                [args.post_burn, '-p'],
                [args.frack_step, '-f'],
                [args.num_temps, '-T'],
                [args.run_until_uncorrelated, '-U'],
                [args.draw_above_likelihood, '-d'],
                [args.gibbs, '-g'],
                [args.save_full_chain, '-c'],
                [args.maximum_memory, '-M']]
            for ua in unused_args:
                if ua[0] is not None:
                    prt.message(
                        'argument_not_used', reps=[ua[1], '-D nester'],
                        warning=True)

    if args.method == 'nester':
        if args.run_until_converged and args.iterations >= 0:
            raise ValueError(prt.text('R_i_mutually_exclusive'))
        if args.walker_paths is not None:
            raise ValueError(prt.text('w_nester_mutually_exclusive'))

    if args.generative:
        if args.iterations > 0:
            prt.message('generative_supercedes', warning=True)
        args.iterations = 0

    no_events = False
    if args.iterations == -1:
        if len(args.events) == 0:
            no_events = True
            args.iterations = 0
        else:
            args.iterations = 5000

    if len(args.date_list):
        if no_events:
            prt.message('no_dates_gen', warning=True)
        else:
            args.time_list = [
                str(astrotime(x.replace('/', '-')).mjd)
                for x in args.date_list]
            args.time_unit = 'mjd'

    if len(args.mjd_list):
        if no_events:
            prt.message('no_dates_gen', warning=True)
        else:
            args.time_list = args.mjd_list
            args.time_unit = 'mjd'

    if len(args.jd_list):
        if no_events:
            prt.message('no_dates_gen', warning=True)
        else:
            args.time_list = [
                str(astrotime(float(x), format='jd').mjd)
                for x in args.jd_list]
            args.time_unit = 'mjd'

    if len(args.phase_list):
        if no_events:
            prt.message('no_dates_gen', warning=True)
        else:
            args.time_list = args.phase_list
            args.time_unit = 'phase'

    if len(args.time_list):
        if any([y in x for y in ['-', '/'] for x in args.time_list]):
            try:
                args.time_list = [
                    astrotime(x.replace('/', '-')).mjd
                    for x in args.time_list]
            except ValueError:
                if len(args.time_list) == 1 and isinstance(
                        args.time_list[0], string_types):
                    args.time_list = args.time_list[0].split()
                args.time_list = [float(x) for x in args.time_list]
                args.time_unit = 'phase'
        else:
            if any(['+' in x for x in args.time_list]):
                args.time_unit = 'phase'
            args.time_list = [float(x) for x in args.time_list]

        if min(args.time_list) > 2400000:
            prt.message('assuming_jd')
            args.time_list = [x - 2400000.5 for x in args.time_list]
            args.time_unit = 'mjd'
        elif min(args.time_list) > 50000:
            prt.message('assuming_mjd')
            args.time_unit = 'mjd'
    else:
        args.time_unit = None

    if args.burn is None and args.post_burn is None:
        args.burn = int(np.floor(args.iterations / 2))

    if args.frack_step == 0:
        args.fracking = False

    if (args.run_until_uncorrelated is not None and
            args.run_until_converged):
        raise ValueError(
            '`-R` and `-U` options are incompatible, please use one or the '
            'other.')

    # Translate the convergence flags into a convergence type and criteria.
    if args.run_until_uncorrelated is not None:
        args.convergence_type = 'acor'
        args.convergence_criteria = args.run_until_uncorrelated
    elif args.run_until_converged:
        if args.method == 'ensembler':
            args.convergence_type = 'psrf'
            args.convergence_criteria = (
                1.1 if args.run_until_converged is True
                else args.run_until_converged)
        else:
            args.convergence_type = 'dlogz'

        if args.method == 'nester':
            args.convergence_criteria = (
                0.02 if args.run_until_converged is True
                else args.run_until_converged)

    if is_master():
        # Get hash of ourselves.
        mosfit_hash = get_mosfit_hash()

        # Print our amazing ASCII logo.
        if not args.quiet:
            with codecs.open(os.path.join(dir_path, 'logo.txt'), 'r',
                             'utf-8') as f:
                logo = f.read()
                firstline = logo.split('\n')[0]
                # if isinstance(firstline, bytes):
                #     firstline = firstline.decode('utf-8')
                width = len(normalize('NFC', firstline))
            prt.prt(logo, colorify=True)
            prt.message(
                'byline',
                reps=[__version__, mosfit_hash, __author__, __contributors__],
                center=True, colorify=True, width=width, wrapped=False)

        # Get/set upload token.
        upload_token = ''
        get_token_from_user = False
        if args.set_upload_token:
            if args.set_upload_token is not True:
                upload_token = args.set_upload_token
            get_token_from_user = True

        upload_token_path = os.path.join(dir_path, 'cache', 'dropbox.token')

        # Perform a few checks on upload before running (to keep size
        # manageable).
        if args.upload and not args.test and args.smooth_times > 100:
            response = prt.prompt('ul_warning_smooth')
            if response:
                args.upload = False
            else:
                sys.exit()

        if (args.upload and not args.test and
                args.num_walkers is not None and args.num_walkers < 100):
            response = prt.prompt('ul_warning_few_walkers')
            if response:
                args.upload = False
            else:
                sys.exit()

        if (args.upload and not args.test and args.num_walkers and
                args.num_walkers * args.num_temps > 500):
            response = prt.prompt('ul_warning_too_many_walkers')
            if response:
                args.upload = False
            else:
                sys.exit()

        if args.upload:
            if not os.path.isfile(upload_token_path):
                get_token_from_user = True
            else:
                with open(upload_token_path, 'r') as f:
                    upload_token = f.read().splitlines()
                if len(upload_token) != 1:
                    get_token_from_user = True
                elif len(upload_token[0]) != 64:
                    get_token_from_user = True
                else:
                    upload_token = upload_token[0]

        if get_token_from_user:
            if args.test:
                upload_token = ('1234567890abcdefghijklmnopqrstuvwxyz'
                                '1234567890abcdefghijklmnopqr')
            while len(upload_token) != 64:
                prt.message(
                    'no_ul_token', ['https://sne.space/mosfit/'],
                    wrapped=True)
                upload_token = prt.prompt('paste_token', kind='string')
                if len(upload_token) != 64:
                    prt.prt(
                        'Error: Token must be exactly 64 characters in '
                        'length.', wrapped=True)
                    continue
                break
            with open_atomic(upload_token_path, 'w') as f:
                f.write(upload_token)

        if args.upload:
            prt.prt(
                "Upload flag set, will upload results after completion.",
                wrapped=True)
completion.", wrapped=True) prt.prt("Dropbox token: " + upload_token, wrapped=True) args.upload_token = upload_token if no_events: prt.message('iterations_0', wrapped=True) # Create the user directory structure, if it doesn't already exist. if args.copy: prt.message('copying') fc = False if args.force_copy: fc = prt.prompt('force_copy') if not os.path.exists('jupyter'): os.mkdir(os.path.join('jupyter')) if not os.path.isfile(os.path.join('jupyter', 'mosfit.ipynb')) or fc: shutil.copy( os.path.join(dir_path, 'jupyter', 'mosfit.ipynb'), os.path.join(os.getcwd(), 'jupyter', 'mosfit.ipynb')) if not os.path.exists('modules'): os.mkdir(os.path.join('modules')) module_dirs = next(os.walk(os.path.join(dir_path, 'modules')))[1] for mdir in module_dirs: if mdir.startswith('__'): continue full_mdir = os.path.join(dir_path, 'modules', mdir) copy_path = os.path.join(full_mdir, '.copy') to_copy = [] if os.path.isfile(copy_path): to_copy = list( filter(None, open(copy_path, 'r').read().split())) mdir_path = os.path.join('modules', mdir) if not os.path.exists(mdir_path): os.mkdir(mdir_path) for tc in to_copy: tc_path = os.path.join(full_mdir, tc) if os.path.isfile(tc_path): shutil.copy(tc_path, os.path.join(mdir_path, tc)) elif os.path.isdir(tc_path) and not os.path.exists( os.path.join(mdir_path, tc)): os.mkdir(os.path.join(mdir_path, tc)) readme_path = os.path.join(mdir_path, 'README') if not os.path.exists(readme_path): txt = prt.message('readme-modules', [ os.path.join(dir_path, 'modules', 'mdir'), os.path.join(dir_path, 'modules') ], prt=False) open(readme_path, 'w').write(txt) if not os.path.exists('models'): os.mkdir(os.path.join('models')) model_dirs = next(os.walk(os.path.join(dir_path, 'models')))[1] for mdir in model_dirs: if mdir.startswith('__'): continue mdir_path = os.path.join('models', mdir) if not os.path.exists(mdir_path): os.mkdir(mdir_path) model_files = next( os.walk(os.path.join(dir_path, 'models', mdir)))[2] readme_path = os.path.join(mdir_path, 'README') if not os.path.exists(readme_path): txt = prt.message('readme-models', [ os.path.join(dir_path, 'models', mdir), os.path.join(dir_path, 'models') ], prt=False) with open(readme_path, 'w') as f: f.write(txt) for mfil in model_files: if 'parameters.json' not in mfil: continue fil_path = os.path.join(mdir_path, mfil) if os.path.isfile(fil_path) and not fc: continue shutil.copy(os.path.join(dir_path, 'models', mdir, mfil), os.path.join(fil_path)) # Set some default values that we checked above. if args.frack_step == 0: args.fracking = False elif args.frack_step is None: args.frack_step = 50 if args.burn is None and args.post_burn is None: args.burn = int(np.floor(args.iterations / 2)) if args.draw_above_likelihood is None: args.draw_above_likelihood = False if args.maximum_memory is None: args.maximum_memory = np.inf if args.gibbs is None: args.gibbs = False if args.save_full_chain is None: args.save_full_chain = False if args.num_temps is None: args.num_temps = 1 if args.walker_paths is None: args.walker_paths = [] # Then, fit the listed events with the listed models. fitargs = vars(args) Fitter(**fitargs).fit_events(**fitargs)
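
# A minimal sketch of a direct entry point, assumed here for illustration;
# the installed `mosfit` console script may wire up `main()` through its own
# wrapper instead of this guard.
if __name__ == '__main__':
    main()
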
def __init__(self,
             parameter_path='parameters.json',
             model='',
             data={},
             wrap_length=100,
             output_path='',
             pool=None,
             test=False,
             printer=None,
             fitter=None,
             print_trees=False):
    """Initialize `Model` object."""
    from mosfit.fitter import Fitter

    self._model_name = model
    self._parameter_path = parameter_path
    self._output_path = output_path
    self._pool = SerialPool() if pool is None else pool
    self._is_master = pool.is_master() if pool else False
    self._wrap_length = wrap_length
    self._print_trees = print_trees
    self._inflect = inflect.engine()
    self._test = test
    self._inflections = {}
    self._references = OrderedDict()
    self._free_parameters = []
    self._user_fixed_parameters = []
    self._user_released_parameters = []
    self._kinds_needed = set()
    self._kinds_supported = set()
    self._draw_limit_reached = False

    self._fitter = Fitter() if not fitter else fitter
    self._printer = self._fitter._printer if not printer else printer

    prt = self._printer

    self._dir_path = os.path.dirname(os.path.realpath(__file__))

    # Load suggested model associations for transient types.
    if os.path.isfile(os.path.join('models', 'types.json')):
        types_path = os.path.join('models', 'types.json')
    else:
        types_path = os.path.join(self._dir_path, 'models', 'types.json')
    with open(types_path, 'r') as f:
        model_types = json.load(f, object_pairs_hook=OrderedDict)

    # Create list of all available models.
    all_models = set()
    if os.path.isdir('models'):
        all_models |= set(next(os.walk('models'))[1])
    models_path = os.path.join(self._dir_path, 'models')
    if os.path.isdir(models_path):
        all_models |= set(next(os.walk(models_path))[1])
    all_models = list(sorted(list(all_models)))

    if not self._model_name:
        claimed_type = None
        try:
            claimed_type = list(
                data.values())[0]['claimedtype'][0][QUANTITY.VALUE]
        except Exception:
            prt.message('no_model_type', warning=True)

        all_models_txt = prt.text('all_models')
        suggested_models_txt = prt.text('suggested_models', [claimed_type])
        another_model_txt = prt.text('another_model')

        type_options = model_types.get(
            claimed_type, []) if claimed_type else []
        if not type_options:
            type_options = all_models
            model_prompt_txt = all_models_txt
        else:
            type_options.append(another_model_txt)
            model_prompt_txt = suggested_models_txt

        if not type_options:
            prt.message('no_model_for_type', warning=True)
        else:
            while not self._model_name:
                if self._test:
                    self._model_name = type_options[0]
                else:
                    sel = self._printer.prompt(
                        model_prompt_txt,
                        kind='option',
                        options=type_options,
                        message=False,
                        default='n',
                        none_string=prt.text('none_above_models'))
                    if sel is not None:
                        self._model_name = type_options[int(sel) - 1]
                if not self._model_name:
                    break
                if self._model_name == another_model_txt:
                    type_options = all_models
                    model_prompt_txt = all_models_txt
                    self._model_name = None

    if not self._model_name:
        return

    # Load the basic model file.
    if os.path.isfile(os.path.join('models', 'model.json')):
        basic_model_path = os.path.join('models', 'model.json')
    else:
        basic_model_path = os.path.join(self._dir_path, 'models',
                                        'model.json')
    with open(basic_model_path, 'r') as f:
        self._model = json.load(f, object_pairs_hook=OrderedDict)

    # Load the model file.
    model = self._model_name
    model_dir = self._model_name

    if '.json' in self._model_name:
        model_dir = self._model_name.split('.json')[0]
    else:
        model = self._model_name + '.json'

    if os.path.isfile(model):
        model_path = model
    else:
        # Look in local hierarchy first.
        if os.path.isfile(os.path.join('models', model_dir, model)):
            model_path = os.path.join('models', model_dir, model)
        else:
            model_path = os.path.join(self._dir_path, 'models', model_dir,
                                      model)

    with open(model_path, 'r') as f:
        self._model.update(json.load(f, object_pairs_hook=OrderedDict))

    # Find @ tags, store them, and prune them from `_model`.
    for tag in list(self._model.keys()):
        if tag.startswith('@'):
            if tag == '@references':
                self._references.setdefault('base', []).extend(
                    self._model[tag])
            del self._model[tag]

    # with open(os.path.join(
    #         self.get_products_path(),
    #         self._model_name + '.json'), 'w') as f:
    #     json.dump(self._model, f)

    # Load model parameter file.
    model_pp = os.path.join(self._dir_path, 'models', model_dir,
                            'parameters.json')

    pp = ''

    local_pp = (self._parameter_path if '/' in self._parameter_path
                else os.path.join('models', model_dir,
                                  self._parameter_path))

    if os.path.isfile(local_pp):
        selected_pp = local_pp
    else:
        selected_pp = os.path.join(self._dir_path, 'models', model_dir,
                                   self._parameter_path)

    # First try user-specified path.
    if self._parameter_path and os.path.isfile(self._parameter_path):
        pp = self._parameter_path
    # Then try directory we are running from.
    elif os.path.isfile('parameters.json'):
        pp = 'parameters.json'
    # Then try the model directory, with the user-specified name.
    elif os.path.isfile(selected_pp):
        pp = selected_pp
    # Finally try model folder.
    elif os.path.isfile(model_pp):
        pp = model_pp
    else:
        raise ValueError(prt.text('no_parameter_file'))

    if self._is_master:
        prt.message('files', [basic_model_path, model_path, pp],
                    wrapped=False)

    with open(pp, 'r') as f:
        self._parameter_json = json.load(f, object_pairs_hook=OrderedDict)

    self._modules = OrderedDict()
    self._bands = []
    self._instruments = []
    self._telescopes = []

    # Load the call tree for the model. Work our way in reverse from the
    # observables, first constructing a tree for each observable and then
    # combining trees.
    root_kinds = ['output', 'objective']

    self._trees = OrderedDict()
    self._simple_trees = OrderedDict()
    self.construct_trees(
        self._model, self._trees, self._simple_trees, kinds=root_kinds)

    if self._print_trees:
        self._printer.prt('Dependency trees:\n', wrapped=True)
        self._printer.tree(self._simple_trees)

    unsorted_call_stack = OrderedDict()
    self._max_depth_all = -1
    for tag in self._model:
        model_tag = self._model[tag]
        roots = []
        if model_tag['kind'] in root_kinds:
            max_depth = 0
            roots = [model_tag['kind']]
        else:
            max_depth = -1
            for tag2 in self._trees:
                if self.in_tree(tag, self._trees[tag2]):
                    roots.extend(self._trees[tag2]['roots'])
                    depth = self.get_max_depth(tag, self._trees[tag2],
                                               max_depth)
                    if depth > max_depth:
                        max_depth = depth
                    if depth > self._max_depth_all:
                        self._max_depth_all = depth
        roots = list(sorted(set(roots)))
        new_entry = deepcopy(model_tag)
        new_entry['roots'] = roots
        if 'children' in new_entry:
            del new_entry['children']
        new_entry['depth'] = max_depth
        unsorted_call_stack[tag] = new_entry
    # print(unsorted_call_stack)

    # Currently just have one call stack for all products, can be wasteful
    # if only using some products.
    self._call_stack = OrderedDict()
    for depth in range(self._max_depth_all, -1, -1):
        for task in unsorted_call_stack:
            if unsorted_call_stack[task]['depth'] == depth:
                self._call_stack[task] = unsorted_call_stack[task]

    # with open(os.path.join(
    #         self.get_products_path(),
    #         self._model_name + '-stack.json'), 'w') as f:
    #     json.dump(self._call_stack, f)

    for task in self._call_stack:
        cur_task = self._call_stack[task]
        mod_name = cur_task.get('class', task)
        if (cur_task['kind'] == 'parameter' and
                task in self._parameter_json):
            cur_task.update(self._parameter_json[task])
        self._modules[task] = self._load_task_module(task)
        if mod_name == 'photometry':
            self._telescopes = self._modules[task].telescopes()
            self._instruments = self._modules[task].instruments()
            self._bands = self._modules[task].bands()
        self._modules[task].set_attributes(cur_task)

    # Look forward to see which modules want dense arrays.
    for task in self._call_stack:
        for ftask in self._call_stack:
            if (task != ftask and
                    self._call_stack[ftask]['depth'] <
                    self._call_stack[task]['depth'] and
                    self._modules[ftask]._wants_dense):
                self._modules[ftask]._provide_dense = True

    # Count free parameters.
    self.determine_free_parameters()
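
# Illustrative usage sketch (assumed, not part of the original source): a
# `Model` can be constructed directly, e.g. to print its dependency trees
# without running a full fit. The model name 'default' is an example value
# and must correspond to a folder under `models/`.
#
#     from mosfit.model import Model
#     model = Model(model='default', print_trees=True)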