def load_config():
    filename, ext = os.path.splitext("jupyter_notebook_config.py")
    new_config = Config()
    for config in Application._load_config_files(filename, path=config_file_paths()):
        new_config.merge(config)
    return new_config
def test_fromdictmerge2(self): c1 = Config({"Foo": {"baz": 2}}) c2 = Config({"Foo": {"bar": 1}}) c1.merge(c2) self.assertEqual(c1.Foo.__class__, Config) self.assertEqual(c1.Foo.bar, 1) self.assertEqual(c1.Foo.baz, 2) self.assertNotIn("baz", c2.Foo)
def test_merge_no_copies(self):
    c = Config()
    c2 = Config()
    c2.Foo.trait = []
    c.merge(c2)
    c2.Foo.trait.append(1)
    # merge shares sub-Config objects rather than copying them,
    # so a mutation through either reference is visible in both
    self.assertIs(c.Foo, c2.Foo)
    self.assertEqual(c.Foo.trait, [1])
    self.assertEqual(c2.Foo.trait, [1])
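# As test_merge_no_copies shows, merge keeps references to the source's
# sub-Config objects. When isolation is wanted instead, a minimal sketch that
# deep-copies before merging; nothing beyond stdlib copy and Config behaving
# like a nested dict is assumed:
import copy

from traitlets.config import Config

c_src = Config({"Foo": {"trait": []}})
c_dst = Config()
# deep-copy first so later mutations of c_src do not leak into c_dst
c_dst.merge(copy.deepcopy(c_src))
c_src.Foo.trait.append(1)
assert c_dst.Foo.trait == []  # unaffected by the append above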
def load_config_file(self, filename, path=None):
    """Load config files by filename and path."""
    filename, ext = os.path.splitext(filename)
    new_config = Config()
    for config in self._load_config_files(
        filename,
        path=path,
        log=self.log,
        raise_config_file_errors=self.raise_config_file_errors,
    ):
        new_config.merge(config)
    # add self.cli_config to preserve CLI config priority
    new_config.merge(self.cli_config)
    self.update_config(new_config)
def test_merge_doesnt_exist(self):
    c1 = Config()
    c2 = Config()
    c2.bar = 10
    c2.Foo.bar = 10
    c1.merge(c2)
    self.assertEqual(c1.Foo.bar, 10)
    self.assertEqual(c1.bar, 10)
    c2.Bar.bar = 10
    c1.merge(c2)
    self.assertEqual(c1.Bar.bar, 10)
def main():
    args = parser.parse_args()

    log.setLevel(logging.INFO)
    handler = logging.StreamHandler()
    logging.getLogger().addHandler(handler)

    log.info(f'Input file: {args.input_file}')
    log.info(f'Number of events in each subrun: {args.max_events}')

    path_list = sorted(glob.glob(args.input_file))
    log.info(f'list of files: {path_list}')

    config_dic = {}
    # read the configuration file
    if args.config is not None:
        config_dic = read_configuration_file(args.config)
    config = Config(config_dic)

    source_config = Config({
        "LSTEventSource": {
            "max_events": args.max_events,
            "pointing_information": False,
            "default_trigger_type": 'tib',
            "use_flatfield_heuristic": args.use_flatfield_heuristic,
            "EventTimeCalculator": {
                "run_summary_path": args.run_summary_path,
            },
            "LSTR0Corrections": {
                "drs4_pedestal_path": args.pedestal_file,
            }
        }
    })
    config.merge(source_config)

    with EventSource(path_list[0]) as s:
        subarray = s.subarray

    timeCorr = TimeCorrectionCalculate(
        calib_file_path=args.output_file,
        config=config,
        subarray=subarray,
    )

    for i, path in enumerate(path_list):
        log.info(f'File {i + 1} out of {len(path_list)}')
        log.info(f'Processing: {path}')

        reader = EventSource(input_url=path, config=config)
        for event in tqdm(reader, disable=args.no_progress):
            timeCorr.calibrate_peak_time(event)

    # write output
    timeCorr.finalize()
def main():
    log.setLevel(logging.INFO)
    handler = logging.StreamHandler()
    logging.getLogger().addHandler(handler)

    log.info(f'Input file: {args.input_file}')
    log.info(f'Number of events in each subrun: {args.max_events}')

    path_list = sorted(glob.glob(args.input_file))
    log.info(f'list of files: {path_list}')

    config_dic = {}
    # read the configuration file
    if args.config_file is not None:
        config_dic = read_configuration_file(args.config_file)
    config = Config(config_dic)

    source_config = Config({
        "LSTEventSource": {
            "max_events": args.max_events,
            "default_trigger_type": 'tib',
            "EventTimeCalculator": {
                "run_summary_path": args.run_summary_path,
            },
            "LSTR0Corrections": {
                "drs4_pedestal_path": args.pedestal_file,
            }
        }
    })
    config.merge(source_config)

    for i, path in enumerate(path_list):
        log.info(f'File {i + 1} out of {len(path_list)}')
        log.info(f'Processing: {path}')

        reader = EventSource(input_url=path, config=config)

        if i == 0:
            timeCorr = TimeCorrectionCalculate(
                calib_file_path=args.output_file,
                config=config,
                subarray=reader.subarray,
            )

        for event in reader:
            if event.index.event_id % 5000 == 0:
                log.info(f'event id = {event.index.event_id}')
            timeCorr.calibrate_peak_time(event)

    # write output
    timeCorr.finalize()
def load_config_file(self, filename, path=None):
    """Load config files by filename and path."""
    filename, ext = os.path.splitext(filename)
    new_config = Config()
    for (config, filename) in self._load_config_files(
        filename,
        path=path,
        log=self.log,
        raise_config_file_errors=self.raise_config_file_errors,
    ):
        new_config.merge(config)
        if filename not in self._loaded_config_files:
            # only add to list of loaded files if not previously loaded
            self._loaded_config_files.append(filename)
    # add self.cli_config to preserve CLI config priority
    new_config.merge(self.cli_config)
    self.update_config(new_config)
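# Both load_config_file variants above come from traitlets' Application.
# A minimal usage sketch; the application name and config filename are
# illustrative assumptions, not taken from the source:
from traitlets.config import Application

class MyApp(Application):
    """Hypothetical app used only to demonstrate load_config_file."""
    name = "myapp"

app = MyApp()
# searches the given path for myapp_config.py / myapp_config.json,
# merges everything found, then re-merges cli_config so CLI flags keep priority
app.load_config_file("myapp_config", path=["."])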
def test_merge_exists(self):
    c1 = Config()
    c2 = Config()
    c1.Foo.bar = 10
    c1.Foo.bam = 30
    c2.Foo.bar = 20
    c2.Foo.wow = 40
    c1.merge(c2)
    self.assertEqual(c1.Foo.bam, 30)
    self.assertEqual(c1.Foo.bar, 20)
    self.assertEqual(c1.Foo.wow, 40)
    c2.Foo.Bam.bam = 10
    c1.merge(c2)
    self.assertEqual(c1.Foo.Bam.bam, 10)
def test_merge_multi_lazyII(self):
    """
    With multiple config files (system-wide and per-user), we want compounding.

    If both are lazy we still want a lazy config.
    """
    c1 = Config()
    c2 = Config()
    c1.Foo.trait.append(1)
    c2.Foo.trait.append(2)
    c = Config()
    c.merge(c1)
    c.merge(c2)
    self.assertEqual(c.Foo.trait._extend, [1, 2])
def test_merge_multi_lazy_update_III(self):
    """
    With multiple config files (system-wide and per-user), we want compounding.

    A later dict update overwrites earlier lazy updates for shared keys.
    """
    c1 = Config()
    c2 = Config()
    c1.Foo.trait.update({"a": 0, "b": 1})
    c2.Foo.trait.update({"a": 1, "z": 26})
    c = Config()
    c.merge(c1)
    c.merge(c2)
    self.assertEqual(c.Foo.trait._update, {"a": 1, "z": 26, "b": 1})
def test_merge_multi_lazy_update_I(self):
    """
    With multiple config files (system-wide and per-user), we want compounding.

    dict updates should be applied in the right order.
    """
    c1 = Config()
    c2 = Config()
    c1.Foo.trait = {"a": 1, "z": 26}
    c2.Foo.trait.update({"a": 0, "b": 1})
    c = Config()
    c.merge(c1)
    c.merge(c2)
    self.assertEqual(c.Foo.trait, {"a": 0, "b": 1, "z": 26})
def test_merge_multi_lazy_IV(self):
    """
    With multiple config files (system-wide and per-user), we want compounding.

    If both configs prepend, the result should stay lazy.
    """
    c1 = Config()
    c2 = Config()
    c1.Foo.trait.prepend([1])
    c2.Foo.trait.prepend([0])
    c = Config()
    c.merge(c1)
    c.merge(c2)
    self.assertEqual(c.Foo.trait._prepend, [0, 1])
def test_merge_multi_lazy_III(self):
    """
    With multiple config files (system-wide and per-user), we want compounding.

    Prepend should prepend in the right order.
    """
    c1 = Config()
    c2 = Config()
    c1.Foo.trait = [1]
    c2.Foo.trait.prepend([0])
    c = Config()
    c.merge(c1)
    c.merge(c2)
    self.assertEqual(c.Foo.trait, [0, 1])
def test_merge_multi_lazy(self):
    """
    With multiple config files (system-wide and per-user), we want compounding.

    If the system-wide config overwrites and the user config appends,
    we want both applied in the right order.
    """
    c1 = Config()
    c2 = Config()
    c1.Foo.trait = [1]
    c2.Foo.trait.append(2)
    c = Config()
    c.merge(c1)
    c.merge(c2)
    self.assertEqual(c.Foo.trait, [1, 2])
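# The lazy-merge tests above rely on container operations (append, prepend,
# update) being recorded on a LazyConfigValue until a concrete value arrives;
# the _extend, _prepend, and _update attributes checked by the assertions are
# that record. A minimal sketch of resolving a pending operation explicitly,
# assuming LazyConfigValue.get_value(initial) as found in traitlets' loader:
from traitlets.config import Config

c = Config()
c.Foo.trait.append(2)   # no concrete value yet, so this stays lazy
lazy = c.Foo.trait      # the LazyConfigValue recording the pending append
# apply the recorded operations on top of a default the trait would supply
assert lazy.get_value([1]) == [1, 2]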
def load_config_file(self, filename, path=None):
    """Load config files by filename and path."""
    filename, ext = os.path.splitext(filename)
    loaded = []
    new_config = Config()
    for config in self._load_config_files(
        filename,
        path=path,
        log=self.log,
        raise_config_file_errors=self.raise_config_file_errors,
    ):
        loaded.append(config)
        new_config.merge(config)
    # add self.cli_config to preserve CLI config priority
    new_config.merge(self.cli_config)
    self.update_config(new_config)
    if len(loaded) > 1:
        collisions = loaded[0].collisions(loaded[1])
        if collisions:
            self.log.warning(
                "Collisions detected in {0}.py and {0}.json config files."
                " {0}.json has higher priority: {1}".format(
                    filename,
                    json.dumps(collisions, indent=2),
                )
            )
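# The collision warning above uses Config.collisions, which compares two
# configs and reports keys they both set to different values. A small sketch;
# the exact message format is whatever traitlets produces, so it is only
# printed here rather than asserted:
from traitlets.config import Config

py_cfg = Config({"Foo": {"bar": 1}})
json_cfg = Config({"Foo": {"bar": 2}})
# nested dict describing each conflicting key; empty when nothing collides
print(py_cfg.collisions(json_cfg))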
def load_file_config(config=None):
    if config is None:
        config = Config()
    file_config = Config()
    config_paths = list()

    def maybe_add_config_path(config_path):
        config_path = validate_config_path(config_path, config)
        if config_path and config_path not in config_paths:
            config_paths.append(config_path)

    # TODO: generate config file list and config_path from config
    # Config files might be sourced from $CWD or $HOME or /etc/
    maybe_add_config_path(config.BaseConfig.get('config_path'))

    for config_path in config_paths:
        new_config = load_single_file_config(config_path, config)
        ROOT_LOGGER.info("merging file config \n%s", pprint.pformat(new_config))
        file_config.merge(new_config)
    return file_config
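# load_file_config above depends on project-specific helpers
# (validate_config_path, load_single_file_config) that are not shown.
# A purely hypothetical stand-in for the first, just to make the control
# flow concrete:
import os

def validate_config_path(config_path, config):
    """Hypothetical helper: return config_path if it names a readable file, else None."""
    if config_path and os.path.isfile(config_path) and os.access(config_path, os.R_OK):
        return config_path
    return None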
def test_fromdictmerge(self): c1 = Config() c2 = Config({"Foo": {"bar": 1}}) c1.merge(c2) self.assertEqual(c1.Foo.__class__, Config) self.assertEqual(c1.Foo.bar, 1)
#
# 3. Update nbconvert configuration
#
json_config = os.path.join(jupyter_config_dir(), 'jupyter_nbconvert_config.json')
print("Configuring %s" % json_config)
if os.path.isfile(json_config):
    cl = JSONFileConfigLoader(json_config)
    config = cl.load_config()
else:
    config = Config()
newconfig = Config()
# Set template path, pre- and postprocessors of notebook extensions
newconfig.Exporter.template_path = [os.path.join(data_dir, 'templates')]
newconfig.Exporter.preprocessors = [
    "pre_codefolding.CodeFoldingPreprocessor",
    "pre_pymarkdown.PyMarkdownPreprocessor",
]
newconfig.NbConvertApp.postprocessor_class = 'post_embedhtml.EmbedPostProcessor'
config.merge(newconfig)
config.version = 1
s = json.dumps(config, indent=2, separators=(',', ': '), sort_keys=True)
with open(json_config, 'w') as f:
    f.write(s)

py_config = os.path.join(jupyter_config_dir(), 'jupyter_nbconvert_config.py')
print("Configuring %s" % py_config)
new_py_config = 'jupyter_nbconvert_config.py'
update_config(py_config, new_py_config)

#
# 4. Update notebook configuration
#
fname = os.path.join(config_dir, 'jupyter_notebook_config.json')
    os.mkdir(config_dir)
if not os.path.exists(data_dir):
    os.mkdir(data_dir)

#
# 3. Update nbconvert configuration
#
json_config = os.path.join(jupyter_config_dir(), 'jupyter_nbconvert_config.json')
print("Configuring %s" % json_config)
if os.path.isfile(json_config):
    cl = JSONFileConfigLoader(json_config)
    config = cl.load_config()
else:
    config = Config()
newconfig = Config()
# Set template path, pre- and postprocessors of notebook extensions
newconfig.Exporter.template_path = ['.', os.path.join(data_dir, 'templates')]
config.merge(newconfig)
config.version = 1
s = json.dumps(config, indent=2, separators=(',', ': '), sort_keys=True)
with open(json_config, 'w') as f:
    f.write(s)

py_config = os.path.join(jupyter_config_dir(), 'jupyter_nbconvert_config.py')
print("Configuring %s" % py_config)
new_py_config = 'nbconvert_config.py'  # 'jupyter_nbconvert_config.py'
update_config(py_config, new_py_config)