def main(config_path):
    """Transform every mapping listed in the configuration file.

    Reads *config_path* (INI with extended interpolation), then for each
    dataset section (dataset1..datasetN) parses its mapping file and
    rewrites it in place via update_mapping().

    :param config_path: path to the INI configuration file.
    """
    config = ConfigParser(interpolation=ExtendedInterpolation())
    config.read(config_path)
    for dataset_number in range(int(config["datasets"]["number_of_datasets"])):
        dataset_i = "dataset" + str(dataset_number + 1)
        mapping_path = config[dataset_i]["mapping"]
        # [-1] replaces the original split(...)[len(split(...)) - 1] idiom:
        # both yield the mapping file's base name for progress messages.
        mapping_name = mapping_path.split("/")[-1]
        print("Transforming mapping " + mapping_name + ".\n")
        triples_map_list = mapping_parser(mapping_path)
        update_mapping(triples_map_list, mapping_path)
        print("Complete transforming mapping " + mapping_name + ".\n")
def semantify(config_path):
    """
    Takes the configuration file path and sets the necessary variables to
    perform the semantification of each dataset presented in said file.
    Given a TTL/N3 mapping file expressing the correspondance rules between
    the raw data and the desired semantified data, the main function performs
    all the necessary operations to do this transformation

    Parameters
    ----------
    config_path : string
        Path to the configuration file

    Returns
    -------
    An .nt file per each dataset mentioned in the configuration file
    semantified. If the duplicates are asked to be removed in main memory,
    also returns a -min.nt file with the triples sorted and with the
    duplicates removed. (No variable returned)
    """
    config = ConfigParser(interpolation=ExtendedInterpolation())
    config.read(config_path)
    for dataset_number in range(int(config["datasets"]["number_of_datasets"])):
        dataset_i = "dataset" + str(dataset_number + 1)
        triples_map_list = mapping_parser(config[dataset_i]["mapping"])
        # Base path shared by the .nt output and the optional -min.nt output
        # (replaces the confusing `output_file_name = output_file = ...`).
        output_base = config["datasets"]["output_folder"] + "/" + config[dataset_i]["name"]
        output_file = output_base + ".nt"
        dataset_format = config[dataset_i]["format"]
        print("Semantifying {}.{}...".format(config[dataset_i]["name"], dataset_format))
        with open(output_file, "w") as output_file_descriptor:
            for triples_map in triples_map_list:
                source_format = str(triples_map.file_format).lower()
                if source_format == "csv" and dataset_format.lower() == "csv":
                    semantify_csv(triples_map, triples_map_list, ",", output_file_descriptor)
                elif source_format == "csv" and dataset_format.lower() == "tsv":
                    semantify_csv(triples_map, triples_map_list, "\t", output_file_descriptor)
                elif triples_map.file_format == "JSONPath":
                    semantify_json(triples_map, triples_map_list, output_file_descriptor)
                else:
                    print("Invalid reference formulation or format")
                    print("Aborting...")
                    sys.exit(1)
        if config[dataset_i]["remove_duplicate_triples_in_memory"].lower() == "yes":
            # Security fix: the original interpolated dataset names into a
            # shell string with shell=True (shell-injection risk). Run sort
            # with an argument list and an explicit stdout redirect instead.
            with open(output_base + "-min.nt", "w") as min_fd:
                subprocess.call(["sort", "-u", output_file], stdout=min_fd)
        print("Successfully semantified {}.{}\n".format(config[dataset_i]["name"], dataset_format))
def __init__(self):
    """Set up the schedule-processor configuration with converters and defaults."""
    # recognised schedule file types
    self.allowed_sched_types = ["vex", "skd"]

    def make_choice_parser(choices, fallback):
        # Build a converter: lower-case the value, keep it if it is one of
        # *choices*, otherwise return *fallback*.
        def parse(val):
            val = val.lower()
            return val if val in choices else fallback
        return parse

    # self.config = configparser.ConfigParser(interpolation=ExtendedInterpolation())
    self.config = configparser.ConfigParser(
        converters={
            "onoff": make_choice_parser(["on", "off"], "off"),
            "contcalpol": make_choice_parser(["none", "0", "1", "2", "3"], "none"),
            "vsialign": make_choice_parser(["none", "0", "1"], "none"),
        })
    self.config._interpolation = ExtendedInterpolation()
    self.logger = None
    self.stations = []
    self.servers = []
    self.sched_types = []
    self.master_check_interval = 24
    self.sched_check_interval = 1
    self.sched_look_ahead_time_d = 7
    self.tpi_period = 0
    self.cont_cal_action = "off"
    self.cont_cal_polarity = "none"
    self.vsi_align = "none"
    self.do_drudg = True
    # These default parameters are command-line options only. We transfer
    # them to this structure in the "load" routine
    self.get_session = None           # Just look for schedules from this session
    self.force_master_update = False  # Force a download of the master file(s)
    self.force_sched_update = False   # Force a download of the schedule file(s)
    self.current = False              # Only process the current or next experiment
    self.run_once = False             # Only check schedules once, then exit. No wait loop
    self.all_stations = False         # get the first session with all the names stations
    self.year = None                  # Which year to check.
def __init__(self, config_file_path: Path):
    """Constructor.

    :param config_file_path: Config file path.
    :raises FileNotFoundError: if config_file_path does not exist or is a
        directory.
    """
    # Bug fix: the original raised FileExistsError, which signals the
    # opposite condition (an unexpectedly *present* file), while the
    # docstring promised RuntimeError. FileNotFoundError describes the
    # actual failure and stays in the same OSError family.
    if not config_file_path.exists() or config_file_path.is_dir():
        raise FileNotFoundError(
            f'no config file found at {config_file_path}')
    config_parser = ConfigParser(interpolation=ExtendedInterpolation())
    config_parser.read(config_file_path)
    self.config_parser = config_parser
def get_config_var(sec, var):
    """Return option *var* from section *sec* of /opt/smon/smon.cfg.

    Logs a fatal message and returns None when the file cannot be parsed
    or the requested section/option is missing.
    """
    from configparser import ConfigParser, ExtendedInterpolation
    path_config = "/opt/smon/smon.cfg"
    config = ConfigParser(interpolation=ExtendedInterpolation())
    try:
        # read() silently skips a missing file but can raise on parse errors.
        config.read(path_config)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        logger.fatal('Check the config file, whether it exists and the path. Must be: /opt/smon/smon.cfg')
    try:
        return config.get(sec, var)
    except Exception:
        logger.fatal('Check the config file. Presence section %s and parameter %s' % (sec, var))
def __init__(self, config_path, config_name):
    """Initialize: load default settings and read (or create) the config file."""
    defaults = setting.Settings()
    self.data = defaults.data
    self.config = configparser.ConfigParser(
        interpolation=ExtendedInterpolation())
    self.config_path = config_path
    self.config_name = config_name
    self.path = os.path.join(self.config_path, self.config_name)
    # Seed the file with the defaults on first run, then load it.
    if not os.path.exists(self.path):
        self.write_config(defaults.data)
    self.config.read(self.path)
def update_config_plugins():
    """Sync the [Plugins] section of config.ini with the plugins directory.

    Options for plugins no longer present on disk are removed; newly
    discovered plugins are added as 'enabled'. The file is rewritten.
    """
    config = ConfigParser(interpolation=ExtendedInterpolation())
    config_file = path.join(rp, 'data', 'config.ini')
    config.read(config_file)
    config.set('Plugins', 'plugins', 'enabled')
    # Hoist the directory scan: the original called plugins_dir_list()
    # once per option and then again for the second loop.
    available = plugins_dir_list()
    # Bug fix: iterate over a snapshot — removing options while iterating
    # the live section proxy mutates the mapping being traversed.
    for plug in list(config['Plugins']):
        if plug not in available:
            config.remove_option('Plugins', plug)
    for plug in available:
        if plug not in config['Plugins']:
            config['Plugins'][plug] = 'enabled'
    with open(config_file, 'w') as file:
        config.write(file)
def __init__(self, config_path='cfg/cfg.ini'):
    """Load scraper file paths and chapter range from *config_path*."""
    cfg = ConfigParser(interpolation=ExtendedInterpolation())
    cfg.read(config_path)
    scraper = cfg['SCRAPER']
    # Stored value is the last chapter; keep the bound exclusive internally.
    self.end_chap = scraper.getint('end_chap') + 1
    self.char_link_fp = scraper.get('char_link_fp')
    self.chap_appearance_fp = scraper.get('chap_appearance_fp')
    self.char_details_fp = scraper.get('char_details_fp')
    self.age_bounty_fp = scraper.get('age_bounty_fp')
    self.arcs = generate_arc(self.end_chap)
def __init__(self, defaults_file=os.path.join('config', 'defaults.cfg'),
             config_file='', **kwargs):
    """Read defaults_file then config_file, then overlay **kwargs sections."""
    super(Config, self).__init__(interpolation=ExtendedInterpolation())
    self.read([defaults_file, config_file])
    # Overlay keyword-provided sections; DEFAULT needs no add_section.
    for section, options in six.iteritems(kwargs):
        if not (section == 'DEFAULT' or self.has_section(section)):
            self.add_section(section)
        for option, value in six.iteritems(options):
            self.set(section, option, str(value))
def __init__(self, ini_file, logger=Logger, category="ve"):
    """
    :param ini_file: config.ini
    :param logger: required
    :param category: CNCI params, -m or --model : assign the classification
        models ("ve" for vertebrate species, "pl" for plant species)
    """
    self.ini_file = ini_file
    self.main_logger = logger
    self.category = category
    self.handle = ConfigParser(interpolation=ExtendedInterpolation())
    # Bug fix: read_file(open(...)) leaked the file descriptor; a context
    # manager guarantees it is closed even if parsing raises.
    with open(self.ini_file) as fh:
        self.handle.read_file(fh)
    # NOTE(review): self.log_path is never assigned in this constructor —
    # presumably set by a base class or elsewhere; confirm before relying on it.
    creat_dir(self.log_path)
def __init__(self, word_emb, char_emb, is_training=True):
    """Build the model: embedding constants, config-driven dimensions,
    fixed hyper-parameters, and the encoder blocks."""
    self.word_emb = tf.constant(word_emb, dtype=tf.float32)
    self.char_emb = tf.constant(char_emb, dtype=tf.float32)
    self.is_training = is_training
    self.config = ConfigParser(interpolation=ExtendedInterpolation())
    self.config.read('config.ini')
    # dimensions and limits for the model
    dim = self.config['dim']
    self.con_lim = dim.getint('para_limit')  # if is_training else dim.getint('test_para_limit')
    self.ques_lim = dim.getint('ques_limit')  # if is_training else dim.getint('test_ques_limit')
    self.char_lim = dim.getint('char_limit')
    self.hid_dim = dim.getint('hidden_layer_size')
    self.enc_dim = dim.getint('encode_dim')
    self.char_dim = dim.getint('char_dim')
    self.N = dim.getint('batch_size')
    # hyper params - TODO: put into a config file
    self.emb_kernel_size = 7
    self.emb_num_blocks = 1
    self.emb_num_conv_layers = 4
    self.model_num_blocks = 7
    self.model_num_conv_layers = 2
    self.l2_regularizer = tf.contrib.layers.l2_regularizer(scale=3e-7)
    self.dropout = 0.1

    # encoder blocks: one for the embeddings, three sharing the model geometry
    def build_block(num_blocks, num_conv_layers):
        return EncoderBlk(num_blocks, num_conv_layers, self.emb_kernel_size,
                          self.enc_dim, self.is_training)

    self.emb_encoder = build_block(self.emb_num_blocks, self.emb_num_conv_layers)
    self.model_blk_1 = build_block(self.model_num_blocks, self.model_num_conv_layers)
    self.model_blk_2 = build_block(self.model_num_blocks, self.model_num_conv_layers)
    self.model_blk_3 = build_block(self.model_num_blocks, self.model_num_conv_layers)
def parse_configuration():
    """ Parse the commandline parameters and the configuration file """
    parser = argparse.ArgumentParser(
        description='Get a secure reverse shell via z/OS FTP')
    parser.add_argument('config_file', type=str,
                        help='configuration file to use')
    parser.add_argument(
        '-d', '--detached', default=False, action='store_true',
        help='run in detached mode, use the config file (from -s) to run the reverse shell manager on another machine')
    parser.add_argument(
        '-s', '--savestate', type=str, default=None,
        help='save the running configuration (including credentials) to a config file')
    parser.add_argument(
        '-t', '--testfilename', type=str, default=None,
        help='run in test mode, creates a testing file without running the shell')
    parser.add_argument('-v', '--verbose', default=False, action='store_true',
                        help='output verbose progress')
    cli_args = parser.parse_args()
    # Detached mode needs a savestate file to hand to the remote manager.
    if cli_args.detached and cli_args.savestate is None:
        print('You need -s for -d')
        sys.exit(1)
    # Config file parsing
    config_file = ConfigParser(interpolation=ExtendedInterpolation())
    config_file.read(cli_args.config_file)
    running_config = config_file['ZOS']
    try:
        if running_config['password'] != '':
            logging.warning('password in config file!')
            logging.warning('Consider using the interactive password request')
    except KeyError:
        # No stored password: ask for it interactively instead.
        prompt = ('Input the password for ' + running_config['username'] +
                  ' on ' + running_config['hostname'] + ': ')
        running_config['password'] = getpass.getpass(prompt)
    return (config_file, cli_args)
def init(self, filename: str = None) -> bool:
    """Build the parser with bundled source definitions; optionally read
    *filename*. Returns True unless the requested file fails to parse."""
    self.parser = ConfigParser(
        defaults={
            ENABLED: 'false',
            INTEGRITY_CHECK: 'sha256',
            MAX_SIZE: '10MB',
        },
        interpolation=ExtendedInterpolation())
    # Preload the built-in signature-source configurations.
    for source in (malwarepatrol, sanesecurity, securiteinfo, urlhaus):
        self.parser.read_dict(source)
    if filename:
        # read() returns the list of successfully parsed files.
        return len(self.parser.read([filename])) == 1
    return True
def init_config():
    """Return a ConfigParser preconfigured for this project.

    Extended interpolation is enabled, options may have no value, and two
    extra converters are registered: getdatetime() parsing
    'YYYY-MM-DD HH:MM' strings and getlist() splitting on commas.
    """
    def _parse_datetime(text):
        return datetime.datetime.strptime(text, '%Y-%m-%d %H:%M')

    def _parse_list(text):
        return text.split(',')

    parser = configparser.ConfigParser(
        interpolation=ExtendedInterpolation(),
        converters={'datetime': _parse_datetime, 'list': _parse_list},
        allow_no_value=True,
    )
    return parser
def test_extended_interpolation():
    """LocalConfig must resolve ${section:key} references across sections."""
    raw = """
[server]
host=0.0.0.0
port=5000
[client]
server_host=${server:host}
server_port=${server:port}
"""
    config = LocalConfig(interpolation=ExtendedInterpolation())
    config.read(raw)
    # Values interpolated from [server] into [client]; port coerced to int.
    assert config.client.server_host == '0.0.0.0'
    assert config.client.server_port == 5000
def fromFile(filename):
    """Build an EpisodeMetadata from an INI file whose [metadata],
    [Season] and [Show] sections are all optional."""
    config = ConfigParser(interpolation=ExtendedInterpolation())
    config.read(filename)

    def section_or_empty(name):
        # Missing sections fall back to an empty mapping.
        return config[name] if name in config else {}

    episodeData = section_or_empty('metadata')
    seasonMetadata = BaseMetadata(section_or_empty('Season'))
    showMetadata = BaseMetadata(section_or_empty('Show'))
    return EpisodeMetadata(episodeData,
                           seasonMetadata=seasonMetadata,
                           showMetadata=showMetadata)
def get_student_list(self):
    """Return the students (section names) recorded in the feedback log.

    Returns an empty list (after printing a notice) when the log cannot
    be read or parsed.
    """
    log_path = self.feedback_dir + '/' + self.FEEDBACK_LOG_NAME
    student_list = []
    try:
        parser = ConfigParser(interpolation=ExtendedInterpolation())
        parser.read(log_path)
        # sections() already excludes DEFAULT; copy into the result list.
        student_list.extend(parser.sections())
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print('You currently have not assigned any feedback')
    return student_list
def data(self):
    """Lazily parse self.file_location's [sources] section into an
    OrderedDict of name -> Source, caching the result on self._data."""
    if not self._data:
        parser = ConfigParser(interpolation=ExtendedInterpolation())
        # Keep option names case-sensitive (default optionxform lowercases).
        parser.optionxform = str
        with open(self.file_location) as fh:
            parser.read_file(fh)
        self._data = OrderedDict(
            (name, Source().create_from_string(raw))
            for name, raw in parser['sources'].items()
        )
    return self._data
def get_config_var(sec, var):
    """Fetch option *var* from section *sec* of the webinterface config.

    Prints an HTML error banner and returns None on any failure.
    """
    path_config = "haproxy-webintarface.config"
    config = ConfigParser(interpolation=ExtendedInterpolation())
    try:
        # read() silently skips a missing file but can raise on parse errors.
        config.read(path_config)
    except Exception:
        # Narrowed from a bare `except:`.
        print('<center><div class="alert alert-danger">Check the config file, whether it exists and the path. Must be in: app/haproxy-webintarface.config</div>')
    try:
        # Return directly instead of rebinding the *var* parameter.
        return config.get(sec, var)
    except Exception:
        print('<center><div class="alert alert-danger">Check the config file. Presence section %s and parameter %s</div>' % (sec, var))
def read_script_config():
    """ This function reads default CAD tools path from configuration file """
    global config, cad_tools
    config = ConfigParser(interpolation=ExtendedInterpolation())
    config.read_dict(script_env_vars)
    default_cad_tool_conf = os.path.join(flow_script_dir, os.pardir, 'misc',
                                         'fpgaflow_default_tool_path.conf')
    # Bug fix: read_file(open(...)) leaked file descriptors; context
    # managers close them even if parsing raises.
    with open(default_cad_tool_conf) as fh:
        config.read_file(fh)
    if args.flow_config:
        with open(args.flow_config) as fh:
            config.read_file(fh)
    if not "CAD_TOOLS_PATH" in config.sections():
        clean_up_and_exit("Missing CAD_TOOLS_PATH in openfpga_flow config")
    cad_tools = config["CAD_TOOLS_PATH"]
def _newConfig(fn):
    """Parse *fn* into a strict ConfigParser using '=' delimiters,
    '#' comments, extended interpolation and 'default' as default section."""
    parser = ConfigParser(
        defaults=None,
        allow_no_value=False,
        delimiters=('=',),
        comment_prefixes=('#',),
        strict=True,
        interpolation=ExtendedInterpolation(),
        default_section='default',
    )
    with open(fn, 'r') as handle:
        parser.read_file(handle)
    return parser
def __init__(self, cfg):
    """Parse the board config file *cfg* and cache each option group."""
    logger.debug("init cfg %s", cfg)
    self.cfg = cfg
    # create a parser object seeded with the environment
    parser = BoardCfgParser(os.environ, interpolation=ExtendedInterpolation())
    parser.read(cfg)
    self.parser = parser
    # Cache each option group once so callers do not re-query the parser.
    self.board_options = parser.get_board_options()
    self.build_options = parser.get_build_options()
    self.rootfs_options = parser.get_rootfs_options()
    self.bootimg_options = parser.get_bootimg_options()
def __init__(self, fname):
    """Read *fname* and flatten every option into self.kwargs,
    literal-eval'ing values into Python objects where possible."""
    super(Config, self).__init__()
    self.config = ConfigParser(interpolation=ExtendedInterpolation())
    self.config.read(fname)
    parsed = {}
    for section in self.config.sections():
        for option, value in self.config.items(section):
            try:
                parsed[option] = literal_eval(value)
            except Exception:
                # Not a Python literal; keep the raw string.
                parsed[option] = value
    self.kwargs = parsed
def get_config(config_uid):
    """Function returns one of the configs

    :param config_uid: API, GUI, ES or ENV value to read selected config
    :return: config
    :raises AssertionError: when the configuration file does not exist
    """
    if not exists(CONFIGS[config_uid]):
        # NOTE(review): AssertionError is kept for caller compatibility,
        # but FileNotFoundError would describe this failure better.
        raise AssertionError('Configuration file does not exist: {}'.format(
            CONFIGS[config_uid]))
    config = ConfigParser(interpolation=ExtendedInterpolation(),
                          allow_no_value=True)
    config.read(CONFIGS[config_uid])
    return config
def __init__(self, config, section, main_loop):
    """Set up keymap handling for *section*: built-in defaults first,
    then an optional user keymap file layered on top."""
    self.main_loop = main_loop
    self.config = config
    self.config_section = section
    self.keymap_config = ConfigParser(
        interpolation=ExtendedInterpolation(), default_section="defaults")
    self.set_keymap_defaults()
    keymap_file = config[section].get("keymap", None)
    if keymap_file:
        # Expand ~ before reading; read() returns the parsed-file list,
        # so an empty result means the file could not be loaded.
        expanded = os.path.expanduser(keymap_file)
        if not self.keymap_config.read(expanded):
            logger.warning("Could not load keymap: %r", expanded)
    self.load_keymap()
def __init__(self, identifier, install_path, proj_path, default_config_string):
    """Build the layered configuration for this instance.

    Order matters: install/proj paths are injected first, then the built-in
    defaults, then any per-project config files, and finally the lock file.

    :param identifier: unique name for this configuration instance.
    :param install_path: installation directory; falls back to this module's
        directory when empty.
    :param proj_path: project directory; falls back to the current working
        directory when empty.
    :param default_config_string: INI text holding the built-in defaults.
    """
    self.IDENTIFIER = identifier
    if not install_path:
        install_path = os.path.dirname(__file__)
        #install_path = os.path.expanduser(install_path)
    if not proj_path:
        proj_path = os.getcwd()
    config_path = pathlib.Path(proj_path).joinpath("config")
    _config = ConfigParser(interpolation=ExtendedInterpolation())
    # This trick keeps option keys case-sensitive
    _config.optionxform = str
    self._config = _config
    _config.add_section("install")
    _config.set("install", "path", install_path)
    _config.add_section("proj")
    _config.set("proj", "path", proj_path)
    _config.read_string(default_config_string)
    # If getfilesystemencoding returns ascii we would be unable to read
    # config files containing exotic characters, so we must pick something
    # else; getdefaultencoding() is a good choice.
    self.conf_encoding = sys.getfilesystemencoding()
    if self.conf_encoding == "ascii":
        self.conf_encoding = sys.getdefaultencoding()
    #self.read("{install_path}/data".format(install_path=install_path))
    self.read_default(config_path)
    self.verify(proj_path, config_path)
    if "config" in self._config:
        for ident, path in self.get_dict("config").items():
            self._config.read(path, encoding=self.conf_encoding)
    # This file must not be editable from the web or the filesystem; it is
    # used to lock down specific configs and must therefore be read last.
    self._config.read(
        "{config_path}/default.lock".format(config_path=config_path),
        encoding=self.conf_encoding)
    # List of configs that must not be visible in the webadmin;
    # populated with the set_hidden_value function.
    self._hidden = {}
def quoteWatchInit():
    """Load API credentials, notification settings and the four price-watch
    pairs from ./config.ini into the global cfgSet object."""
    print("in quoteWatchInit\r\n")
    cfg = ConfigParser(interpolation=ExtendedInterpolation())
    try:
        cfg.read('./config.ini')
        cfgSet.api_key = str(cfg.get('Unity', 'api_key'))
        cfgSet.seceret_key = str(cfg.get('Unity', 'seceret_key'))
        cfgSet.passphrase = str(cfg.get('Unity', 'passphrase'))
        print(cfgSet.api_key + "," + cfgSet.seceret_key + "," + cfgSet.passphrase)
        logging.info(cfgSet.api_key + "," + cfgSet.seceret_key + "," + cfgSet.passphrase)
        cfgSet.phoneKey = str(cfg.get('Unity', 'phoneKey'))
        print(cfgSet.phoneKey)
        logging.info(cfgSet.phoneKey)
        cfgSet.phone = cfg.get('Unity', 'phone')
        channel = cfg.get('Unity', 'subscribe')
        cfgSet.subscribe = channel.split(',')
        logging.info(cfgSet.subscribe)
        print(cfgSet.subscribe)
        logging.info(str(cfgSet.phone))
        # The four watch pairs share the same layout; read them in a loop
        # instead of the original four copy-pasted blocks.
        for idx in range(1, 5):
            section = 'pair' + str(idx)
            symbol = str(cfg.get(section, 'symbol'))
            price_high = cfg.getfloat(section, 'priceHigh')
            price_low = cfg.getfloat(section, 'priceLow')
            setattr(cfgSet, 'symbol' + str(idx), symbol)
            setattr(cfgSet, 'priceHigh' + str(idx), price_high)
            setattr(cfgSet, 'priceLow' + str(idx), price_low)
            message = section + ": " + symbol + " " + str(price_high) + " " + str(price_low)
            logging.info(message)
            print(message)
    except Exception as e:
        # Bug fix: logging.error("config error ", str(e)) passed str(e) as a
        # %-format argument with no placeholder, which corrupts log output.
        logging.error("config error %s", str(e))
        print("config error ", str(e))
def set_state(self, entity, attributes, old, new, kwargs):
    """State-change callback: mirror a presence change to Netatmo.

    Marks the tracked person at home when *entity* enters 'home', and away
    when it goes from 'home' to 'not_home'. Credentials and the sensor
    mapping are re-read from self.cfg_file on every call.
    """
    self.config = ConfigParser(delimiters=(':', '='),
                               interpolation=ExtendedInterpolation())
    self.config.read(self.cfg_file)
    token = self.config['TOKEN']['token']
    home = self.config['HOME']['home_id']

    def call_netatmo(endpoint, params, friendly_name, action):
        # Shared POST + logging path for setpersonshome / setpersonsaway;
        # the original duplicated this try/except block in both branches.
        try:
            response = requests.post(
                "https://api.netatmo.com/api/" + endpoint, params=params)
            response.raise_for_status()
            status = response.json()["status"]
            self.log("Setting {} {}...".format(
                friendly_name.capitalize(), action))
            self.log("Response: {}".format(status.upper()))
        except requests.exceptions.HTTPError as error:
            self.log(error.response.status_code, error.response.text)

    if new == 'home' and old != new:
        friendly_name = self.get_state(entity, attribute='friendly_name')
        person_id = self.config['SENSORS']['sensor_' + friendly_name.lower()]
        params = {
            'access_token': token,
            'home_id': home,
            # setpersonshome expects a JSON-style list of person ids.
            'person_ids': '[ "' + person_id + '" ]'
        }
        call_netatmo('setpersonshome', params, friendly_name, 'home')
    elif new == 'not_home' and old == 'home':
        friendly_name = self.get_state(entity, attribute='friendly_name')
        person_id = self.config['SENSORS']['sensor_' + friendly_name.lower()]
        params = {
            'access_token': token,
            'home_id': home,
            'person_id': person_id
        }
        call_netatmo('setpersonsaway', params, friendly_name, 'away')
def get_config_var(sec, var):
    """Return option *var* from section *sec* of haproxy-wi.cfg.

    Prints a CGI error banner and returns None on any failure.
    """
    from configparser import ConfigParser, ExtendedInterpolation
    path_config = "/var/www/haproxy-wi/app/haproxy-wi.cfg"
    config = ConfigParser(interpolation=ExtendedInterpolation())
    try:
        # read() silently skips a missing file but can raise on parse errors.
        config.read(path_config)
    except Exception:
        # Narrowed from a bare `except:`.
        print('Content-type: text/html\n')
        print('<center><div class="alert alert-danger">Check the config file, whether it exists and the path. Must be: app/haproxy-webintarface.config</div>')
    try:
        return config.get(sec, var)
    except Exception:
        print('Content-type: text/html\n')
        print('<center><div class="alert alert-danger">Check the config file. Presence section %s and parameter %s</div>' % (sec, var))
def get_plugins_path(config_path=None, plugin_path=None):
    """Resolve the plugins directory.

    An explicit *plugin_path* wins; otherwise the path is read from the
    config file (default ~/.osmedeus/config.conf) when it exists, falling
    back to the bundled plugins directory.
    """
    if plugin_path:
        return Path(plugin_path)
    if not config_path:
        config_path = str(Path.home().joinpath('.osmedeus/config.conf'))
    if os.path.isfile(os.path.normpath(config_path)):
        core_config = ConfigParser(interpolation=ExtendedInterpolation())
        core_config.read(config_path)
        # NOTE: the section is spelled 'Enviroments' (sic) in the file.
        return Path(core_config.get('Enviroments', 'plugins_path'))
    return BASE_DIR.joinpath('plugins')