def get_results(self, cfg_file):
    # Ugly hack: replace analyzers configuration with user's analyzer
    # configuration
    with open('config.toml') as user_cfg:
        user_conf = pytoml.load(user_cfg)
    with open(cfg_file) as data_cfg:
        data_conf = pytoml.load(data_cfg)
    data_conf['analyzers'] = user_conf['analyzers']
    with tempfile.NamedTemporaryFile(mode='w+') as final_cfg:
        pytoml.dump(data_conf, final_cfg)
        final_cfg.flush()
        inv_idx = metapy.index.make_inverted_index(final_cfg.name)
        fwd_idx = metapy.index.make_forward_index(final_cfg.name)
        dset = metapy.classify.MulticlassDataset(fwd_idx)
        training_bounds = data_conf['training-set']
        testing_bounds = data_conf['testing-set']
        train_view = dset[training_bounds[0]:training_bounds[1]]
        test_view = dset[testing_bounds[0]:testing_bounds[1]]
        classifier = make_classifier(train_view, inv_idx, fwd_idx)
        return [classifier.classify(inst.weights) for inst in test_view]

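# Why the flush() above matters: NamedTemporaryFile buffers writes, and
# make_inverted_index re-opens the file by its name, so the TOML has to be on
# disk before the second reader looks at it. A minimal sketch of the same
# pattern (POSIX; on Windows a still-open NamedTemporaryFile generally cannot
# be reopened by name):
import tempfile

import pytoml

with tempfile.NamedTemporaryFile(mode='w+') as tmp:
    pytoml.dump({'key': 'value'}, tmp)
    tmp.flush()                     # make the bytes visible to other readers
    with open(tmp.name) as reader:  # a second handle, opened by name
        print(reader.read())
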
def generate_search_config(ranker, is_server=True):
    SEARCH_DATASET = "wikipedia"
    config = {}  # renamed from `dict`, which shadows the builtin
    config['stop-words'] = DATASET_CONFIG[SEARCH_DATASET]["stopwords"]
    config['prefix'] = DATASET_CONFIG[SEARCH_DATASET]["prefix"]
    config['dataset'] = DATASET_CONFIG[SEARCH_DATASET]["name"]
    config['corpus'] = DATASET_CONFIG[SEARCH_DATASET]["corpus"]
    config['index'] = DATASET_CONFIG[SEARCH_DATASET]["index"]
    config['analyzers'] = [{
        'method': "ngram-word",
        'ngram': 1,
        'filter': "default-unigram-chain"
    }]
    config['ranker'] = {'method': ranker.ranker_id_4_config}
    config_params = ''
    for key, value in ranker.__dict__.items():
        key = str(key)
        if key.startswith('p_'):
            config['ranker'][key[2:]] = round(float(value), 4)
            config_params += key[2:] + '=' + str(round(float(value), 4)) + ","
    file_name = 'temp.toml'
    with open(file_name, 'w+') as fout:
        pytoml.dump(fout, config)
    return file_name, config_params.strip(',')

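# Note on argument order: the snippets in this collection disagree about
# pytoml.dump's signature. Some call dump(obj, fout), matching json.dump and
# the order recent pytoml releases document; others, like the function above,
# pass the file object first, as older pytoml releases accepted. If code has
# to run against either version, one hedge is to serialize via dumps(), which
# has only ever taken the data:
import pytoml

def dump_compat(obj, fout):
    """Write obj as TOML without depending on pytoml.dump's argument order."""
    fout.write(pytoml.dumps(obj))
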
def configure(cls):
    try:
        curr = cls._load_config()
    except ConfigurationError:
        curr = DEFAULT_CONFIG
    curr['api_token'] = cls._config_prompt('API Token', curr, 'api_token')
    curr['output_date_format'] = cls._config_prompt(
        'Output Date Format', curr, 'output_date_format')
    curr['time_offset'] = int(
        cls._config_prompt('Time Offset', curr, 'time_offset'))
    curr['cache_enabled'] = cls._config_prompt(
        'Enable Cache (y/[n])', curr, 'cache_enabled').lower()[0]
    if curr['cache_enabled'] == 'y':
        curr['cache'] = cls._config_prompt('Cache File', curr, 'cache')
    elif 'cache' in curr:
        del curr['cache']
    cfg_path = os.path.expanduser('~/.cliist.toml')
    with open(cfg_path, 'w') as fs:
        pytoml.dump(fs, curr)

def addSatellite(name):
    global _snipsConf
    _logger.info('Adding satellite')
    try:
        if 'bind' not in _snipsConf['snips-audio-server']:
            _snipsConf['snips-audio-server']['bind'] = 'default@mqtt'
        if 'audio' not in _snipsConf['snips-hotword']:
            _snipsConf['snips-hotword']['audio'] = ['default@mqtt']
        name = '{}@mqtt'.format(name)
        if name not in _snipsConf['snips-hotword']['audio']:
            _snipsConf['snips-hotword']['audio'].append(name)
        with open('/etc/snips.toml', 'w') as f:
            pytoml.dump(_snipsConf, f)
        restartSnips()
    except Exception:
        _logger.error('Updating and restarting Snips after adding satellite failed')
        _mqttClient.publish('satConnect/server/confUpdateFailed', json.dumps({}))

def disconnectSatellite():
    global _snipsConf, _running
    _logger.info('Disconnecting satellite from main unit')
    if _snipsConf['snips-common']['mqtt'] == '' or _snipsConf['snips-audio-server']['bind'] == '':
        _logger.error("Was asked to disconnect but it doesn't look like this is a satellite")
        _running = False
        raise KeyboardInterrupt
    else:
        satelliteName = _snipsConf['snips-audio-server']['bind']
        _logger.info('Writing local toml configuration...')
        del _snipsConf['snips-common']['mqtt']
        del _snipsConf['snips-audio-server']['bind']
        with open('/etc/snips.toml', 'w') as f:
            pytoml.dump(_snipsConf, f)
        if _mqttClient is None:
            connectMqtt()
        _mqttClient.publish('satConnect/server/disconnect',
                            json.dumps({'name': satelliteName}))

def update_config(data):
    """Update jumpscale config with new data

    Arguments:
        data {dict} -- dict to update the config with
    """
    with open(config_path, "w") as f:
        toml.dump(data, f)

def write_labels(filename: str, labels: typing.List[Label]) -> None:
    """Dump labels to the given TOML file."""
    logger = logging.getLogger("labels")
    logger.debug(f"Writing labels to {filename}")
    obj = {label.name: label.params_dict for label in labels}
    with open(filename, "w") as labels_file:
        pytoml.dump(obj, labels_file)

def updateTomlConfig():
    global _snipsConf, _coreIp, _satelliteName
    _logger.info('Writing local toml configuration...')
    _snipsConf['snips-common']['mqtt'] = '{}:1883'.format(_coreIp)
    _snipsConf['snips-audio-server']['bind'] = '{}@mqtt'.format(_satelliteName)
    with open('/etc/snips.toml', 'w') as f:
        pytoml.dump(_snipsConf, f)
    updateCoreToml()

def save_settings_table(path, data):
    """Saves settings to a file"""
    try:
        table_open_object = open(path, 'w')
    except FileNotFoundError:
        try:
            os.makedirs(os.path.dirname(path))
        except OSError:
            pass
        table_open_object = open(path, 'x')
    with table_open_object:  # ensure the handle is closed after writing
        pytoml.dump(table_open_object, data)

def on_chat_message(self, msg):
    content_type, chat_type, chat_id = telepot.glance(msg)
    LOG.info('incoming message content_type: {}, chat_type: {}, chat_id: {}'.format(
        content_type, chat_type, chat_id))
    if content_type == 'text' and msg['text'] == '/start':
        chat_ids[msg['chat']['username']] = chat_id
        with open(chat_ids_path, "w") as f:
            pytoml.dump(chat_ids, f)
        LOG.info("Registering a new agent {}".format(chat_id))
        self.sender.sendMessage('Hey, Get Ready, we will start sending you alerts')
        self.close()

def save_table(path, data):
    """Saves settings to a file"""
    try:
        table_open_object = open(path, 'w')
    except FileNotFoundError:
        try:
            os.makedirs(os.path.dirname(path))
        except OSError:
            pass
        table_open_object = open(path, 'x')
    with table_open_object:  # ensure the handle is closed after writing
        pytoml.dump(table_open_object, data)

def write_results(file, results):
    ret = []
    for index, res in results.items():
        ret.append({
            'index': index,
            'jobid': str(res.jobid),
            'state': {'status': res.state.status.name,
                      'exit_code': res.state.exit_code},
            'cwd': res.cwd
        })
    pytoml.dump({'result': ret}, file)

def build_default_conf(folder: Path, package_name: str) -> None:
    conf_path = folder / CONF_NAME
    if conf_path.exists():
        _LOGGER.info("Skipping default conf since the file exists")
        return
    _LOGGER.info("Build default conf for %s", package_name)
    conf = {_SECTION: _CONFIG.copy()}
    conf[_SECTION]["package_name"] = package_name
    with open(conf_path, "w") as fd:
        toml.dump(conf, fd)

def select_tomb_install_path(self, config):
    """Select Tomb's installation path."""
    tomb_install_path = QFileDialog.getExistingDirectory(
        self, 'Select Tomb Installation Path')
    if tomb_install_path:
        self.tomb_path_line.setText(tomb_install_path)
        config['configuration']['path'] = tomb_install_path
        with open(self.user_config_file, 'w') as conffile:
            pytoml.dump(conffile, config)

def build_default_conf(folder: Path, package_name: str) -> None:
    conf_path = folder / CONF_NAME
    if conf_path.exists():
        _LOGGER.info("Skipping default conf since the file exists")
        return
    _LOGGER.info("Build default conf for %s", package_name)
    conf = {_SECTION: _CONFIG.copy()}
    conf[_SECTION]["package_name"] = package_name
    conf[_SECTION]["package_nspkg"] = package_name[:package_name.rindex('-')] + "-nspkg"
    with open(conf_path, "w") as fd:
        toml.dump(conf, fd)

def nninit_cli(settings):
    nndir = Path(settings.nndir)
    try:
        nndir.mkdir()
    except OSError as e:
        if e.errno == 17:  # EEXIST
            die('directory "%s" already exists' % settings.nndir)
        raise
    cfg_path = nndir / 'nn_config.toml'
    with cfg_path.open('wt') as f:
        pytoml.dump(f, NNINIT_DEFAULT_CONFIG)

def export_to_file(
    self,
    data: Any,
    filename: str,
    file_kwargs: Optional[Mapping] = None,
    dump_kwargs: Optional[Mapping] = None,
) -> None:
    if not file_kwargs:
        file_kwargs = {"mode": "w"}
    if not dump_kwargs:
        dump_kwargs = {}
    with open(filename, **file_kwargs) as f:
        pytoml.dump(data, f, **dump_kwargs)

def convert(path):
    cp = configparser.ConfigParser()
    with path.open(encoding='utf-8') as f:
        cp.read_file(f)

    ep_file = Path('entry_points.txt')
    metadata = OrderedDict()
    for name, value in cp['metadata'].items():
        if name in metadata_list_fields:
            metadata[name] = [l for l in value.splitlines() if l.strip()]
        elif name == 'entry-points-file':
            ep_file = Path(value)
        else:
            metadata[name] = value

    if 'scripts' in cp:
        scripts = OrderedDict(cp['scripts'])
    else:
        scripts = {}

    entrypoints = CaseSensitiveConfigParser()
    if ep_file.is_file():
        with ep_file.open(encoding='utf-8') as f:
            entrypoints.read_file(f)

    written_entrypoints = False
    with Path('pyproject.toml').open('w', encoding='utf-8') as f:
        f.write(TEMPLATE.format(metadata=pytoml.dumps(metadata)))

        if scripts:
            f.write('\n[tool.flit.scripts]\n')
            pytoml.dump(scripts, f)

        for groupname, group in entrypoints.items():
            if not dict(group):
                continue
            if '.' in groupname:
                groupname = '"{}"'.format(groupname)
            f.write('\n[tool.flit.entrypoints.{}]\n'.format(groupname))
            pytoml.dump(OrderedDict(group), f)
            written_entrypoints = True

    print("Written 'pyproject.toml'")
    files = str(path)
    if written_entrypoints:
        files += ' and ' + str(ep_file)
    print("Please check the new file, then remove", files)

def export_to_file(
    self,
    data: Any,
    filename: str,
    file_kwargs: Optional[Mapping] = None,
    dump_kwargs: Optional[Mapping] = None,
) -> None:
    if not file_kwargs:
        file_kwargs = {
            "mode": "w",
            "encoding": "utf-8",
        }
    if not dump_kwargs:
        dump_kwargs = {}
    with open(filename, **file_kwargs) as f:  # pylint: disable=unspecified-encoding
        pytoml.dump(data, f, **dump_kwargs)

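# A usage sketch for export_to_file above; `exporter` stands in for whatever
# object the method is defined on (a hypothetical name, not from the source).
# dump_kwargs is forwarded to pytoml.dump, which accepts sort_keys:
exporter.export_to_file(
    {"server": {"host": "localhost", "port": 8080}},
    "server.toml",
    dump_kwargs={"sort_keys": True},
)
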
def generate_eval_config(ranker, dataset):
    start_index = {'cranfield': 1, 'apnews': 0}
    print('dataset!!', dataset)
    config = {}  # renamed from `dict`, which shadows the builtin
    config['stop-words'] = "data/lemur-stopwords.txt"
    config['prefix'] = "."
    config['dataset'] = dataset
    config['corpus'] = "line.toml"
    config['index'] = dataset + "-index"
    config['query-judgements'] = "data/" + dataset + "-qrels.txt"
    config['analyzers'] = [{
        'method': "ngram-word",
        'ngram': 1,
        'filter': "default-unigram-chain"
    }]
    config['query-runner'] = {
        'query-path': "data/" + dataset + "-queries.txt",
        'query-id-start': start_index[dataset]
    }
    config_params = ''
    for key, value in ranker.__dict__.items():
        key = str(key)
        if key.startswith('p_'):
            config_params += key[2:] + '=' + str(round(float(value), 4)) + ","
    file_name = 'temp.toml'
    with open(file_name, 'w+') as fout:
        pytoml.dump(fout, config)
    return file_name, config_params.strip(',')

def lock_domain_range_cli(settings):
    from . import DomainRange

    # Load samples
    df = basic_load(settings.datadir)

    # Load skeleton config
    cfg_path = Path(settings.nndir) / 'nn_config.toml'
    with cfg_path.open('rt') as f:
        info = pytoml.load(f)

    # Turn into processed DomainRange object
    dr = DomainRange.from_info_and_samples(info, df)

    # Update config and rewrite
    dr.into_info(info)
    with cfg_path.open('wt') as f:
        pytoml.dump(f, info)

def generate_config(ds_name, path):
    """
    Construct config.toml for the dataset; assumes line.toml was
    created during upload. If the config already exists, return it.
    """
    cfg = path + "/" + ds_name + "-config.toml"
    if os.path.isfile(cfg):
        return cfg
    obj = dict()
    obj['prefix'] = "."
    obj['dataset'] = ds_name
    obj['corpus'] = "file.toml"
    obj['index'] = ds_name + "-idx"
    obj['analyzers'] = [dict()]
    analyzer = obj['analyzers'][0]
    analyzer['ngram'] = 1
    analyzer['method'] = "ngram-word"
    analyzer['filter'] = [{'type': "icu-tokenizer"}, {'type': "lowercase"}]
    with open(cfg, 'w+') as f:
        toml.dump(f, obj)
    return cfg

def create_config(path, config_dict=None):
    """
    Create config file.
    """
    if config_dict is None:
        config_dict = {}
    file_extension = os.path.splitext(path)[-1]
    path = get_full_path(path)
    if file_extension not in (".json", ".toml"):
        # warn before opening anything, so no empty file is left behind
        logging.warning('"{file}" has not been created! It has a format '
                        'different from json and toml.'.format(file=path))
        return
    if not os.path.exists(path):
        create_path(path, is_file=True)
    with open(path, 'w') as config_file:
        if file_extension == ".json":
            json.dump(config_dict, config_file, indent=4, sort_keys=True)
        else:
            pytoml.dump(config_dict, config_file, sort_keys=True)

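# Usage sketch for create_config: the serializer is chosen from the file
# extension, and anything other than .json or .toml is only logged. Assumes
# the get_full_path/create_path helpers this module relies on:
create_config("settings.toml", {"debug": True})   # written as TOML
create_config("settings.json", {"debug": True})   # written as JSON
create_config("settings.yaml", {"debug": True})   # warning, nothing written
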
def update_toml(cls, path):
    """Update a config file, preserving existing known entries but adding
    values for parameters that weren't given values explicitly before. Note
    that this intentionally does not use the inheritance scheme, since we
    don't want to add all of the inherited values to the existing file.

    """
    import pytoml

    try:
        with open(path, 'rt') as f:
            data = pytoml.load(f)
    except FileNotFoundError:  # yay Python 3!
        data = {}

    inst = cls.from_collection(data)
    inst.to_collection(data)

    with open(path, 'wt') as f:
        pytoml.dump(f, data, sort_keys=True)

def removeSatellite(name):
    global _snipsConf
    _logger.info('Removing satellite')
    try:
        if 'audio' not in _snipsConf['snips-hotword']:
            _snipsConf['snips-hotword']['audio'] = ['default@mqtt']
        if name in _snipsConf['snips-hotword']['audio']:
            # audio is a list, so remove by value rather than del by key
            _snipsConf['snips-hotword']['audio'].remove(name)
        with open('/etc/snips.toml', 'w') as f:
            pytoml.dump(_snipsConf, f)
        restartSnips()
    except Exception:
        _logger.error('Updating and restarting Snips after satellite deletion failed')
        _mqttClient.publish('satConnect/server/confUpdateFailed', json.dumps({}))

def generate_config(ds_name, path):
    """
    Construct config.toml for the dataset; assumes line.toml was
    created during upload. If the config already exists, return it.
    """
    cfg = path + "/" + ds_name + "-config.toml"
    if os.path.isfile(cfg):
        return cfg
    obj = dict()
    obj['prefix'] = "."
    obj['dataset'] = ds_name
    obj['corpus'] = "file.toml"
    obj['index'] = ds_name + "-idx"
    # assumed location of the stopword list; the base path is not defined
    # anywhere in this snippet
    obj['stop-words'] = path + "/stopwords.txt"
    obj['analyzers'] = [dict()]
    analyzer = obj['analyzers'][0]
    analyzer['ngram'] = 1
    analyzer['method'] = "ngram-word"
    analyzer['filter'] = "default-unigram-chain"
    with open(cfg, 'w+') as f:
        toml.dump(f, obj)
    return cfg

def update_config(path="", config_dict=None):
    """
    Update current config file
    """
    if config_dict is None:
        config_dict = {}
    path = get_full_path(path)
    file_extension = os.path.splitext(path)[-1]
    if not os.path.exists(path):
        logging.warning('The "{file}" does not exist!'.format(file=path))
        return
    if file_extension not in (".json", ".toml"):
        # warn before opening, so an existing file is never truncated
        logging.warning('"{file}" has not been updated! It has a format '
                        'different from json and toml.'.format(file=path))
        return
    with open(path, 'w') as config_file:
        if file_extension == ".json":
            json.dump(config_dict, config_file, indent=4, sort_keys=True)
        else:
            pytoml.dump(config_dict, config_file, sort_keys=True)

def serialize(obj, **options):
    """
    Serialize Python data to TOML.

    :param obj: the data structure to serialize.
    :param options: options given to lower pytoml module.
    """
    try:
        if "file_out" in options:
            # pop file_out so it is not passed a second time via **options
            file_out = options.pop("file_out")
            return toml.dump(obj, file_out, **options)
        else:
            return toml.dumps(obj, **options)
    except Exception as error:  # pylint: disable=broad-except
        raise SerializationError(error)

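# Usage sketch for serialize() above: without file_out it returns a TOML
# string; with file_out it writes into the given open file object.
text = serialize({"a": 1})
with open("out.toml", "w") as fh:
    serialize({"a": 1}, file_out=fh)
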
def write_params(self, form_params, user):
    sim_dir = os.path.join(user_dir, user, self.appname, form_params[u'case_id'])
    if not os.path.exists(sim_dir):
        os.makedirs(sim_dir)
    file_name = os.path.join(sim_dir, self.simfn)
    toml_dict = {}
    for section in self.blockorder:
        toml_dict[section] = {}
        for k in self.blockmap[section]:
            if k in form_params:
                original_type = type(self.original_toml_dict[section][k])
                toml_dict[section][k] = self.cast_string_to_original_type(
                    form_params[k], original_type)
    with open(file_name, u'w') as f:
        toml.dump(toml_dict, f)
    return 1

def serialize(obj, **options):
    '''
    Serialize Python data to TOML.

    :param obj: the data structure to serialize.
    :param options: options given to lower pytoml module.
    '''
    try:
        if 'file_out' in options:
            # pop file_out so it is not passed a second time via **options
            file_out = options.pop('file_out')
            return toml.dump(obj, file_out, **options)
        else:
            return toml.dumps(obj, **options)
    except Exception as error:
        raise SerializationError(error)

def _update_dataset_config(self, ds_path, dataset_name):
    cfg = ds_path + '/' + dataset_name + '/config.toml'
    obj = dict()
    obj['prefix'] = ds_path
    obj['stop-words'] = ds_path + '/stopwords.txt'
    obj['dataset'] = dataset_name
    obj['corpus'] = "file.toml"
    obj['index'] = ds_path + '/idx/' + dataset_name + "-idx"
    obj['query-judgements'] = ds_path + '/' + dataset_name + '/' + dataset_name + '-qrels.txt'
    obj['analyzers'] = [dict()]
    analyzer = obj['analyzers'][0]
    analyzer['ngram'] = 1
    analyzer['method'] = "ngram-word"
    analyzer['filter'] = "default-unigram-chain"
    obj['query-runner'] = dict()
    obj['query-runner']['query-path'] = ds_path + '/' + dataset_name + '/' + dataset_name + '-queries.txt'
    obj['query-runner']['query-id-start'] = 0
    obj['query-runner']['timeout'] = 120
    with open(cfg, 'w+') as f:
        toml.dump(f, obj)

def generate_eval_config(ranker, dataset):
    print('dataset!!', dataset)
    eval_dict = {}
    eval_dict['stop-words'] = DATASET_CONFIG[dataset]["stopwords"]
    eval_dict['prefix'] = DATASET_CONFIG[dataset]["prefix"]
    eval_dict['dataset'] = DATASET_CONFIG[dataset]["name"]
    eval_dict['corpus'] = DATASET_CONFIG[dataset]["corpus"]
    eval_dict['index'] = DATASET_CONFIG[dataset]["index"]
    eval_dict['query-judgements'] = DATASET_CONFIG[dataset]["query-judgements"]
    eval_dict['analyzers'] = [{
        'method': "ngram-word",
        'ngram': 1,
        'filter': "default-unigram-chain"
    }]
    eval_dict['query-runner'] = {
        'query-path': DATASET_CONFIG[dataset]["query-path"],
        'query-id-start': DATASET_CONFIG[dataset]["query-id-start"]
    }
    config_params = ''
    for key, value in ranker.__dict__.items():
        key = str(key)
        if key.startswith('p_'):
            config_params += key[2:] + '=' + str(round(float(value), 4)) + ","
    file_name = 'temp.toml'
    with open(file_name, 'w+') as fout:
        pytoml.dump(fout, eval_dict)
    return file_name, config_params.strip(',')

def write(config):
    global _CONFIG
    _CONFIG = config
    c_path = _config_fpath()
    with open(c_path, 'w') as fs:
        pytoml.dump(fs, config)

def main():
    argparser = get_parser()
    args = argparser.parse_args(sys.argv[1:])
    if args.profile_clean:
        profile_clean()
        return
    distutils.log.set_verbosity(1)
    system = BuildSystem(args)
    if args.path:
        ext_modules = [system.get_extension_by_path(args.path)]
    else:
        ext_modules = system.discover_extensions()
    if args.kit:
        ext_modules = [e for e in ext_modules if e.name in kits[args.kit]]

    def add_args(arg_name, values, append=True):
        for ext_module in ext_modules:
            arg_value = getattr(ext_module, arg_name) or []
            if append:
                arg_value.extend(values)
            else:
                newvalues = list(values)
                newvalues.extend(arg_value)
                arg_value = newvalues
            setattr(ext_module, arg_name, arg_value)

    def append_compile_args(*values):
        add_args('extra_compile_args', values)

    def append_link_args(*values):
        add_args('extra_link_args', values)

    def prepend_libraries(*values):
        add_args('libraries', values, append=False)

    if args.native:
        append_compile_args('-march=native')
    if args.optimization:
        append_compile_args('-O' + args.optimization)
    if args.debug:
        append_compile_args('-g3', '-O0', '-Wp,-U_FORTIFY_SOURCE')
    if args.sanitize:
        append_compile_args('-g3', '-fsanitize=address', '-fsanitize=undefined',
                            '-fno-common', '-fno-omit-frame-pointer')
        prepend_libraries('asan', 'ubsan')
    if args.profile_generate:
        append_compile_args('--profile-generate')
        append_link_args('-lgcov')
    if args.profile_use:
        for ext_module in ext_modules:
            if ext_module.name in ('parser.cparser', 'pipeline.cpipeline'):
                continue
            ext_module.extra_compile_args.append('--profile-use')
    if args.flto:
        append_compile_args('-flto')
        append_link_args('-flto')
    if args.coverage:
        append_compile_args('--coverage')
        append_link_args('-lgcov')
    if args.extra_compile:
        append_compile_args(args.extra_compile)

    ext_modules = [e for e in ext_modules if system.should_rebuild(e)]
    if not ext_modules:
        return

    dist = Distribution(dict(ext_modules=ext_modules))
    prune(args.dest)
    cmd = custom_build_ext(dist)
    cmd.build_lib = os.path.join(args.dest, '.build/lib')
    cmd.build_temp = os.path.join(args.dest, '.build/temp')
    cmd.finalize_options()
    try:
        cmd.run()
    except CompileError:
        sys.exit(1)
    symlink_python_files(args.dest)
    for ext_module in ext_modules:
        os.makedirs(system.dest_folder(ext_module.name), exist_ok=True)
        shutil.copy(
            cmd.get_ext_fullpath(ext_module.name),
            system.dest_folder(ext_module.name))
    for ext_module in ext_modules:
        with open(system.build_toml(ext_module.name), 'w') as f:
            build_info = {
                'extra_compile_args': ext_module.extra_compile_args,
                'extra_link_args': ext_module.extra_link_args,
                'define_macros': dict(ext_module.define_macros),
                'sources': ext_module.sources
            }
            pytoml.dump(f, build_info)

def save(self, path):
    with open(path, 'w') as fp:
        toml.dump(fp, self)

def dump_config(config, path):
    if "version" not in config:
        raise VersionNotDefined()
    with open(path, "w") as f:
        return pytoml.dump(f, config)

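# Usage sketch for dump_config above, assuming VersionNotDefined is the
# exception defined alongside it in the source module:
dump_config({"version": 1, "name": "demo"}, "demo.toml")   # written
try:
    dump_config({"name": "demo"}, "demo.toml")
except VersionNotDefined:
    pass  # refused: the config carries no "version" key
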
def dump_file(self, data, fn):
    with open(fn, 'w') as f:
        toml.dump(f, data)

hekaToml = None
with open('heka/hekad_example.toml', 'r') as tomlFile:
    hekaToml = toml.load(tomlFile)

# Inject values for Slack communication
hekaToml["SlackEncoder"]["config"]["username"] = configToml["Slack"]["username"]
hekaToml["SlackEncoder"]["config"]["channel"] = configToml["Slack"]["channel"]
hekaToml["SlackOutput"]["address"] = configToml["Slack"]["address"]

# Inject values for environment names
for key, values in hekaToml.iteritems():
    if "message_matcher" in values:
        values["message_matcher"] = values["message_matcher"].replace(
            "cf-np", configToml["Env"]["np"])
        values["message_matcher"] = values["message_matcher"].replace(
            "cf-prd", configToml["Env"]["prd"])

# Write to the final hekad.toml
with open('heka/hekad.toml', 'w') as tomlFile:
    toml.dump(tomlFile, hekaToml)

# Heka Globals
with open('heka/lua_modules/sample_globals.lua', 'r') as sampleFile, \
        open('heka/lua_modules/globals.lua', 'w') as globalFile:
    for line in sampleFile:
        line = line.replace(
            "http://server.company.com",
            configToml["Host"]["protocol"] + "://" + configToml["Host"]["grafana_address"])
        line = line.replace("cf-prd", configToml["Env"]["prd"])
        line = line.replace("cf-np", configToml["Env"]["np"])
        globalFile.write(line)

# Grafana Config
grafanaConfig = ConfigParser.ConfigParser()
grafanaConfig.read("grafana/grafana_example.ini")

# Set the domain for viewing grafana
grafanaConfig.set("server", "domain", configToml["Host"]["grafana_address"])

for path in EXCLUDED_CRATES:
    all_configs.remove(path)

try:
    print("Downloading dependencies for:")
    for config in all_configs:
        print(" - %s" % config)

    config = {
        "workspace": {
            "members": list(all_configs)
        }
    }
    with open(toml_path, "w") as config_file:
        pytoml.dump(config, config_file)

    cargo_bin = get_cargo_bin()

    # Generate Cargo.lock.
    lockfile_args = [
        cargo_bin,
        "generate-lockfile",
    ]
    call_or_exit(lockfile_args, base_dir)
    crates = parse_dependencies(lock_path)

    # Populate the vendor directory.
    vendor_args = [
        args.cargo_vendor,

def save(self):
    """ Save the TOML file """
    with open(self._filename, "w") as f:
        pytoml.dump(f, self._collapse(self._toml))