def setup_nodes(cp: ConfigParser) -> None:
    """Interactively build the ``node_i`` sections of the nodes config.

    Prompts for validators/sentries/full nodes (plus their Node Exporter
    URLs) and writes one section per node into ``cp``. Node names must
    match those already configured in the API Server(s).
    """
    print('==== Nodes')
    print('To produce alerts, the alerter needs something to monitor! The list '
          'of nodes to be included in the monitoring will now be set up. This '
          'includes validators, sentries, and any full nodes that can be used '
          'as data sources to monitor from the network\'s perspective, together'
          ' with the Node Exporter URL to be used to monitor the system. You '
          'may include nodes from multiple networks in any order; PANIC '
          'will figure out which network they belong to when you run it. Node '
          'names must be set identical to the ones previously set in the API '
          'Server/s!')

    # An existing list is only discarded with the user's consent.
    if len(cp.sections()) > 0:
        if not yn_prompt('The list of nodes is already set up. Do you wish to '
                         'clear this list? You will then be asked to set up a '
                         'new list of nodes, if you wish to do so (Y/n)\n'):
            return

    # Start from a blank config.
    cp.clear()
    node_list = []

    if not yn_prompt('Do you wish to set up the list of nodes? (Y/n)\n'):
        return

    # Collect nodes one at a time until the user is done.
    while True:
        # A fresh API wrapper supplies the data needed to validate each node.
        api_wrapper = OasisApiWrapper(DUMMY_LOGGER)
        new_node = get_node(node_list, api_wrapper)

        if new_node is not None:
            node_list.append(new_node)
            print('Successfully added validator node.'
                  if new_node.node_is_validator
                  else 'Successfully added full node.')

        if not yn_prompt('Do you want to add another node? (Y/n)\n'):
            break

    # Persist the collected nodes, one section each.
    for idx, n in enumerate(node_list):
        sect = 'node_' + str(idx)
        cp.add_section(sect)
        cp[sect]['node_name'] = n.node_name
        cp[sect]['chain_name'] = n.chain_name
        cp[sect]['node_api_url'] = n.node_api_url
        cp[sect]['node_public_key'] = n.node_public_key
        cp[sect]['node_is_validator'] = \
            'true' if n.node_is_validator else 'false'
        cp[sect]['node_exporter_url'] = n.node_exporter_url
        cp[sect]['monitor_node'] = \
            'true' if n.monitor_node else 'false'
        cp[sect]['is_archive_node'] = \
            'true' if n.is_archive_node else 'false'
        cp[sect]['use_as_data_source'] = \
            'true' if n.use_as_data_source else 'false'
Ejemplo n.º 2
0
def start_monitor(config_path, metric_config_path):
    """Launch metric monitoring driven by the two configuration files.

    A metric is only monitored when it appears both as a function in
    'task/metric_task.py' and as a section in the metric config file;
    anything present on only one side is logged and skipped.

    :param config_path: string, path of the main config file.
    :param metric_config_path: string, path of the metric config file.
    :return: NA
    """
    # Both configuration files must exist before anything else happens.
    if not os.path.exists(config_path):
        logger.error(
            '{config_path} is not exist.'.format(config_path=config_path))
        return
    if not os.path.exists(metric_config_path):
        logger.error('{metric_config_path} is not exist.'.format(
            metric_config_path=metric_config_path))
        return

    parser = ConfigParser()
    parser.read(config_path)
    check_status, required_parameters = check_required_parameter(parser)
    if check_status == 'fail':
        return

    monitor_service = Monitor()
    # Reuse the same parser for the metric configuration.
    parser.clear()
    parser.read(metric_config_path)
    names_in_py = {item[0] for item in get_funcs(metric_task)}
    names_in_config = set(parser.sections())

    for metric_name in names_in_config | names_in_py:
        if metric_name not in names_in_py:
            # Present in the config file only.
            logger.error(
                "'{metric_name}' is not defined in 'task/metric_task.py', skip monitor."
                .format(metric_name=metric_name))
            continue
        if metric_name not in names_in_config:
            # Present in the python task module only.
            logger.error(
                "'{metric_name}' has no config information in 'task/metric_config.conf', skip monitor."
                .format(metric_name=metric_name))
            continue
        # Defined on both sides: assemble the forecast task parameters.
        optional_parameters = check_optional_parameter(parser, metric_name)
        check_status, detect_basis_parameter = check_detect_basis(
            parser, metric_name)
        if check_status == 'fail':
            continue
        kwargs = {}
        kwargs.update(**optional_parameters)
        kwargs.update(**required_parameters)
        kwargs.update(**detect_basis_parameter)
        kwargs['data_handler'] = DataHandler
        # 'forecast_alg' arrives as a name and is replaced by an instance.
        kwargs['forecast_alg'] = forecast_algorithm(
            kwargs['forecast_alg'])()
        kwargs['metric_name'] = metric_name
        monitor_service.apply(Forecastor(**kwargs))
    monitor_service.start()
Ejemplo n.º 3
0
class Config(metaclass=Singleton):
    """global kks config

    Loads ``config.ini`` from the config directory on construction;
    attribute access for names annotated on ``ConfigModel`` is routed to
    typed section wrappers (see ``__getattribute__``).
    """
    def __init__(self):
        # An absent file simply yields an empty parser.
        self._file = config_directory() / 'config.ini'
        self._config = ConfigParser()
        if self._file.is_file():
            self._config.read(self._file)
        # delete legacy section
        if self._config.has_section('Links'):
            self._config.remove_section('Links')
            self.save()

    def save(self):
        """Write the current configuration back to disk."""
        with self._file.open('w') as f:
            self._config.write(f)

    def reload(self):
        """force reload from disk"""
        # NOTE(review): read() merges into the already-loaded state rather
        # than resetting it first — confirm that is intended.
        if self._file.is_file():
            self._config.read(self._file)
        else:
            # file vanished: fall back to an empty configuration
            self._config.clear()

    def __getattribute__(self, key):
        # Names annotated on ConfigModel are materialized as section
        # wrappers bound to the underlying parser; everything else resolves
        # normally. Reads of self._config inside this method do not recurse
        # because '_config' is not a ConfigModel annotation.
        if key in ConfigModel.__annotations__:
            section_type = ConfigModel.__annotations__[key]
            return section_type(self._config, key)
        return super().__getattribute__(key)

    def __delattr__(self, key):
        # Deleting an annotated attribute removes its whole config section.
        if key in ConfigModel.__annotations__:
            self._config.remove_section(Section.canonical_name(key))
        else:
            super().__delattr__(key)
Ejemplo n.º 4
0
def get_all_ra_config(def_buttons: list) -> list:
    """
    Scan RetroArch's gamepad auto-configuration folder and return the
    configured joystick devices as a list of InputDev objects.

    Index 0 always holds a wildcard fallback mapping, used for joysticks
    that have no configuration file of their own.
    """
    devices = []
    # Fallback device: D-Pad on hat 0 plus four face buttons.
    fallback = InputDev("*", "*")
    fallback.add_mappings(
        {},  # no axis mappings
        {0: [(1, 'up'), (8, 'left'), (4, 'down'), (2, 'right')]},  # D-Pad as 'hat0'
        {0: 'b', 1: 'a', 3: 'y', 4: 'x'}  # 4 buttons
    )
    devices.append(fallback)
    joypad_dir = CONFIG_DIR + '/all/retroarch-joypads/'

    parser = ConfigParser(delimiters="=", strict=False, interpolation=None)
    for fname in os.listdir(joypad_dir):
        # only regular '.cfg' files are joypad configurations
        if fname.startswith('.') or not fname.endswith('.cfg'):
            continue

        with open(joypad_dir + fname, 'r') as cfg_file:
            try:
                parser.clear()
                # the file has no section header; invent one for ConfigParser
                parser.read_string('[device]\n' + cfg_file.read())
                conf_vals = parser['device']
                dev_name = conf_vals['input_device'].strip('"')

                # translate the RetroArch inputs from the configuration file
                axis, buttons, hats = {}, {}, {}
                for key in conf_vals:
                    if not key.startswith('input'):
                        continue
                    if not (key.endswith('btn') or key.endswith('axis')):
                        continue
                    input_type, input_index, input_value = ra_input_parse(
                        key, conf_vals[key].strip('"'))

                    # keep only inputs mapped to events we recognize
                    event_name = ra_event_map(key)
                    if event_name not in def_buttons:
                        continue
                    if input_type == 'button':
                        buttons[input_index] = event_name
                    elif input_type == 'hat':
                        hats.setdefault(input_index, []).append(
                            (input_value, event_name))
                    elif input_type == 'axis':
                        axis.setdefault(input_index, []).append(
                            (input_value, event_name))
                dev = InputDev(dev_name, None)
                dev.add_mappings(axis, buttons, hats)
                devices.append(dev)
            except Exception as e:
                LOG.warning(f'Parsing error for {fname}: {e}')
                continue

    return devices
Ejemplo n.º 5
0
def setup_nodes(cp: ConfigParser) -> None:
    """Interactive setup of the monitored nodes list stored in ``cp``."""
    print('==== Nodes')
    print('To produce alerts, the alerter needs something to monitor! The list '
          'of nodes to be included in the monitoring will now be set up. This '
          'includes validators, sentries, and any full nodes that can be used '
          'as a data source to monitor from the network\'s perspective. You '
          'may include nodes from multiple networks in any order; P.A.N.I.C. '
          'will figure out which network they belong to when you run it. Node '
          'names must be unique!')

    # Offer to replace an existing list, or to create one from scratch.
    have_list = bool(cp.sections())
    if have_list:
        if not yn_prompt('The list of nodes is already set up. Do you wish '
                         'to replace this list with a new one? (Y/n)\n'):
            return
    elif not yn_prompt('Do you wish to set up the list of nodes? (Y/n)\n'):
        return

    # Wipe the old configuration before gathering new entries.
    cp.clear()
    collected = []

    # Gather node details until the user stops.
    while True:
        candidate = get_node(collected)
        if candidate is not None:
            collected.append(candidate)
            print('Successfully added validator node.'
                  if candidate.node_is_validator
                  else 'Successfully added full node.')

        if not yn_prompt('Do you want to add another node? (Y/n)\n'):
            break

    # Write one config section per node.
    cp.clear()
    for idx, n in enumerate(collected):
        sect = 'node_' + str(idx)
        cp.add_section(sect)
        cp[sect]['node_name'] = n.node_name
        cp[sect]['node_rpc_url'] = n.node_rpc_url
        cp[sect]['node_is_validator'] = \
            'true' if n.node_is_validator else 'false'
        cp[sect]['include_in_node_monitor'] = \
            'true' if n.include_in_node_monitor else 'false'
        cp[sect]['include_in_network_monitor'] = \
            'true' if n.include_in_network_monitor else 'false'
Ejemplo n.º 6
0
    def setUp(self):
        """Build a System with virtual assets and a sqlite persistence layer."""
        # configure logging
        logging.basicConfig(format='%(levelname)s:%(message)s',
                            level=logging.DEBUG)

        self.gp = gridpi_core.System()

        # configure asset models from an in-memory configuration
        parser = ConfigParser()
        parser.read_dict({
            'FEEDER': {
                'class_name': 'VirtualFeeder',
                'name': 'feeder'
            },
            'ENERGY_STORAGE': {
                'class_name': 'VirtualEnergyStorage',
                'name': 'inverter'
            },
            'GRID_INTERTIE': {
                'class_name': 'VirtualGridIntertie',
                'name': 'grid'
            }
        })

        # the asset factory turns each configuration section into an asset
        asset_factory = model_core.AssetFactory()
        for section in parser.sections():
            self.gp.add_asset(asset_factory.factory(parser[section]))
        del asset_factory

        # configure persistence model
        parser.clear()
        parser.read_dict({
            'PERSISTENCE': {
                'class_name': 'DBSQLite3',
                'local_path': '/database/GridPi.sqlite',
                'empty_database_on_start': 1
            }
        })
        persistence_factory = persistence_core.PersistenceFactory()
        for section in parser.sections():
            self.db = persistence_factory.factory(parser[section])
        del persistence_factory

        # register every asset's status parameters with the database
        asset_refs = [a for a in self.gp.assets]
        for asset in asset_refs:
            status_params = [p for p in asset.status.keys()]
            self.db.addGroup(asset.config['name'], *status_params)
Ejemplo n.º 7
0
def main(*args, **kwargs):
    """Initialize the System object and run the asyncio update loops.

    Creates the system container, loads assets, processes and persistent
    storage from the config files referenced by ``kwargs['bootstrap']``,
    then runs the asset / persistence / virtual-system update tasks forever.

    :param kwargs: must contain 'bootstrap', a parser exposing a
        'BOOTSTRAP' section with the ``*_cfg_local_path`` options.
    """
    gp = gridpi_core.System()  # Create System container object
    vs = virtual_system.Virtual_System(
        gp.state_machine, gp.asset_container)  # virtual system for testing

    # read asset config.ini
    bootstrap_parser = kwargs['bootstrap']
    parser = ConfigParser()
    parser.read(bootstrap_parser['BOOTSTRAP']['asset_cfg_local_path'])
    asset_factory = model_core.AssetFactory()  # Create Asset Factory object
    for cfg in parser.sections():
        gp.add_asset(asset_factory.factory(parser[cfg], virtual_system=vs))
    del asset_factory

    # read process config.ini
    parser.clear()
    parser.read(bootstrap_parser['BOOTSTRAP']['process_cfg_local_path'])
    process_factory = process_core.ProcessFactory()
    for cfg in parser.sections():
        gp.add_process(process_factory.factory(parser[cfg]))
    del process_factory

    # read persistent storage config.ini
    # NOTE(review): if the persistence config has no sections, `db` stays
    # unbound and the create_task call below raises NameError — presumably
    # at least one PERSISTENCE section is always present; confirm.
    parser.clear()
    parser.read(bootstrap_parser['BOOTSTRAP']['persistence_cfg_local_path'])
    persistence_factory = persistence_core.PersistenceFactory()
    for cfg in parser.sections():
        db = persistence_factory.factory(parser[cfg])
    del persistence_factory
    del bootstrap_parser
    del parser

    gp.process_container.sort()  # Sort the process tags by dependency

    loop = asyncio.get_event_loop()  # Get event loop
    loop.create_task(update_assets_loop(gp, poll_rate=.1))
    loop.create_task(update_persistent_storage(gp, db, .2))
    loop.create_task(update_virtual_system(vs))

    try:
        loop.run_forever()
    except KeyboardInterrupt:
        # Ctrl-C is the expected way to stop. Other exceptions now
        # propagate instead of being swallowed by the previous bare
        # `except:`, which also silently hid programming errors.
        pass
    finally:
        loop.close()
Ejemplo n.º 8
0
class formula_class:
    """Resolves formula placeholders in request data against config values."""

    # class-level default; presumably populated elsewhere — TODO confirm
    loan_id = None

    def formula_open(self):
        """Load config/config.conf into the instance parser."""
        self.con = ConfigParser()
        self.con.read("config/config.conf", encoding="utf-8")

    def formula_intermediate(self, formula, data):
        """Replace the first ``formula`` match in ``data`` with the config
        value named by the match's first capture group."""
        print(formula)
        print(data)
        found = re.search(formula, data)

        lookup_key = found.group(1)
        print(lookup_key)

        conf_data = self.con.get("data", lookup_key)
        print(conf_data)
        print(data)
        print(formula)
        print(type(conf_data))
        print(type(data))
        print(type(formula))

        if lookup_key == "loan_id":
            # loan_id values are resolved live against the test database
            my = mysql()
            my.mysql_connect(host="test.lemonban.com",
                             user="******",
                             password="******",
                             port=3306)
            conf_data = my.mysql_sql(conf_data)
            print(conf_data)
            print(data)
            print(formula)
            print(type(conf_data))
            print(type(data))
            print(type(formula))
        return re.sub(formula, str(conf_data), data, count=1)

    def formula_clear(self):
        """Drop all loaded config sections."""
        self.con.clear()
Ejemplo n.º 9
0
def setup_nodes(cp: ConfigParser) -> None:
    """Interactive setup of the API's node list (incl. Prometheus URLs)."""
    print('==== Nodes')
    print(
        'To retrieve data from nodes, the API needs to know where to find '
        'the nodes! The list of nodes the API will connect to will now be '
        'set up. This includes validators, sentries, and any full nodes that '
        'can be used as a data source to retrieve data from the network\'s '
        'perspective. Node names must be unique! The list of API nodes must '
        'also include their Prometheus endpoints.')

    # Offer to replace an existing list, or to create one from scratch.
    have_list = bool(cp.sections())
    if have_list:
        if not yn_prompt('The list of nodes is already set up. Do you wish '
                         'to replace this list with a new one? (Y/n)\n'):
            return
    elif not yn_prompt('Do you wish to set up the list of nodes? (Y/n)\n'):
        return

    # Wipe the old configuration before gathering new entries.
    cp.clear()
    collected = []

    # Gather node details until the user stops.
    while True:
        candidate = get_node(collected)
        if candidate is not None:
            collected.append(candidate)
            print('Successfully added node.')

        if not yn_prompt('Do you want to add another node? (Y/n)\n'):
            break

    # Write one config section per node.
    for idx, n in enumerate(collected):
        sect = 'node_' + str(idx)
        cp.add_section(sect)
        cp[sect]['node_name'] = n.node_name
        cp[sect]['isocket_path'] = n.isocket_path
        cp[sect]['prometheus_url'] = n.prometheus_url
Ejemplo n.º 10
0
def load_assets(screen=None):
    """Read every asset config file and dispatch each section to its loader."""
    global __screen
    __screen = screen
    cfg_dir = get_path(_assets_config_path)
    parser = ConfigParser()
    for fname in list_dir(cfg_dir):
        parser.read(get_path(_assets_config_path + fname))
        for section in parser.sections():
            # each section declares its loader via the "type" option
            loader = _assets_switch[parser.get(section, "type")]
            loader(section, dict(parser.items(section)))
        parser.clear()

    global theme
    theme = pgui.Theme(get_path("data/assets/default/"))

    print(music)
    print(texture)
    print(sound)
    print(font)
Ejemplo n.º 11
0
def setup_nodes(cp: ConfigParser) -> None:
    """Interactively configure the sentry endpoints the API connects to.

    Prompts the user for sentry nodes and writes one ``node_i`` section per
    sentry (name, external URL, TLS certificate path) into ``cp``.

    :param cp: parser holding (or about to hold) the sentry endpoint list.
    """
    print('==== Sentry')
    print('To retrieve data from sentries, the API needs to know their '
          'endpoints! The list of sentry endpoints the API will connect '
          'to will now be set-up. Sentry names must be unique.')

    # Check if list already set up
    already_set_up = len(cp.sections()) > 0
    if already_set_up:
        if not yn_prompt(
                'The list of sentry endpoints is already set up. '
                'Do you wish to replace this list with a new one? (Y/n)\n'):
            return

    # Otherwise ask if they want to set it up
    if not already_set_up and \
            not yn_prompt('Do you wish to set up the '
                          'list of sentry endpoints? (Y/n)\n'):
        return

    # Clear config and initialise new list
    cp.clear()
    nodes = []

    # Get node details and append them to the list of nodes
    while True:
        node = get_node(nodes)
        if node is not None:
            nodes.append(node)
            print('Successfully added Sentry node.')

        # FIX: the original concatenated 'another' and 'sentry' without a
        # separating space, so the prompt read "anothersentry endpoint?".
        if not yn_prompt('Do you want to add another '
                         'sentry endpoint? (Y/n)\n'):
            break

    # Add nodes to config
    for i, node in enumerate(nodes):
        section = 'node_' + str(i)
        cp.add_section(section)
        cp[section]['node_name'] = node.node_name
        cp[section]['ext_url'] = node.ext_url
        cp[section]['tls_path'] = node.tls_path
Ejemplo n.º 12
0
def setup_repos(cp: ConfigParser) -> None:
    """Interactive setup of the GitHub repositories to monitor."""
    print('==== GitHub Repositories')
    print(
        'The GitHub monitor alerts on new releases in repositories. The list '
        'of GitHub repositories to monitor will now be set up.')

    # Offer to replace an existing list, or to create one from scratch.
    have_list = bool(cp.sections())
    if have_list:
        if not yn_prompt('The list of repositories is already set up. Do you '
                         'wish to replace this list with a new one? (Y/n)\n'):
            return
    elif not yn_prompt('Do you wish to set up the list of repos? (Y/n)\n'):
        return

    # Wipe the old configuration before gathering new entries.
    cp.clear()
    collected = []

    # Gather repositories until the user stops.
    while True:
        candidate = get_repo()
        if candidate is not None:
            collected.append(candidate)
            print('Successfully added repository.')

        if not yn_prompt('Do you want to add another repo? (Y/n)\n'):
            break

    # Write one config section per repository.
    cp.clear()
    for idx, r in enumerate(collected):
        sect = 'repo_' + str(idx)
        cp.add_section(sect)
        cp[sect]['repo_name'] = r.repo_name
        cp[sect]['repo_page'] = r.repo_page
        cp[sect]['include_in_github_monitor'] = \
            'true' if r.include_in_github_monitor else 'false'
Ejemplo n.º 13
0
class ContentParser:
    """Splits a content file into ini-style meta data and a body."""

    FILE_EXTENSIONS = []

    def __init__(self):
        # interpolation disabled so meta values may contain '%' characters
        self.configparser = ConfigParser(interpolation=None)

    def parse_meta_data(self, fp, content):
        """Read the meta-data header from ``fp`` into ``content``.

        The header is everything up to (but excluding) the second
        consecutive blank line, parsed as options of a synthetic
        ``[meta]`` ini section.
        """
        # seed the buffer with the synthetic section header and position
        # the cursor after it so subsequent writes append
        buffer = StringIO('[meta]\n')
        buffer.read()

        blank_run = 0
        for line in iter(fp.readline, ''):  # stops at eof
            blank_run = blank_run + 1 if not line.strip() else 0
            if blank_run == 2:
                break
            buffer.write(line)

        buffer.seek(0)

        self.configparser.clear()
        self.configparser.read_file(buffer)

        for option in self.configparser.options('meta'):
            content[option] = self.configparser.get('meta', option)

    def parse(self, fp, content):
        """Populate ``content`` with the meta data plus the stripped body."""
        self.parse_meta_data(fp, content)
        content['content_body'] = fp.read().strip()
Ejemplo n.º 14
0
class formula_re:
    """Regex-based placeholder substitution backed by the OL.conf data."""

    def re_start(self, name):
        """Load the OL.conf configuration used for lookups."""
        self.config = ConfigParser()
        print(os_path.conf_path)
        print(name)
        self.config.read(
            R"C:\Users\Administrator\PycharmProjects\python_15\new_1\config\OL.conf",
            encoding="utf-8")

    def replace(self, formula, data):
        """Substitute the first ``formula`` match in ``data`` with the
        config value named by its first capture group."""
        found = re.search(formula, data)
        lookup_key = found.group(1)
        replacement = self.config.get("data", lookup_key)
        return re.sub(formula, replacement, data, count=1)

    def re_clear(self):
        """Forget all loaded configuration sections."""
        self.config.clear()
Ejemplo n.º 15
0
    def read_config(self,
                    config_file,
                    basedir=None,
                    init_obslocs_ungridded=False,
                    init_data_search_dirs=False):
        """Read and import data paths from a paths.ini configuration file.

        Parameters
        ----------
        config_file : str
            path of the paths.ini file to read.
        basedir : str, optional
            base directory substituted for ${BASEDIR} placeholders in the
            [supplfolders] section; when None, the section's own BASEDIR
            entry is used if present.
        init_obslocs_ungridded : bool
            if True, reset OBSLOCS_UNGRIDDED before reading.
        init_data_search_dirs : bool
            if True, reset the model data search directories before reading.

        Raises
        ------
        IOError
            if config_file does not exist or is not a file.
        """
        #Read and import paths from ini file

        if not os.path.isfile(config_file):
            raise IOError("Configuration file paths.ini at %s does not exist "
                          "or is not a file" % config_file)

        # optionally start from a clean state before importing new locations
        if init_obslocs_ungridded:
            self.OBSLOCS_UNGRIDDED = od()
        if init_data_search_dirs:
            self._search_dirs = []

        cr = ConfigParser()
        cr.read(config_file)
        #init base directories for Model data
        if cr.has_section('modelfolders'):
            self._add_searchdirs(cr, basedir)

        if cr.has_section('obsfolders'):
            self._add_obsconfig(cr, basedir)

        if cr.has_section('outputfolders'):
            self._init_output_folders_from_cfg(cr)

        # supplementary folders: expand ${BASEDIR} using the explicit
        # basedir argument or the section's own BASEDIR entry
        if cr.has_section('supplfolders'):
            if basedir is None and 'BASEDIR' in cr['supplfolders']:
                basedir = cr['supplfolders']['BASEDIR']

            for name, path in cr['supplfolders'].items():
                if '${BASEDIR}' in path:
                    path = path.replace('${BASEDIR}', basedir)
                self.SUPPLDIRS[name] = path

        cr.clear()
        self.GRID_IO.load_aerocom_default()
        # remember which file was loaded last
        self.last_config_file = config_file
Ejemplo n.º 16
0
def setup_repos(cp: ConfigParser) -> None:
    """Interactive setup of the GitHub repositories to monitor."""
    print('==== GitHub Repositories')
    print(
        'The GitHub monitor alerts on new releases in repositories. The list '
        'of GitHub repositories to monitor will now be set up.')

    # An existing list is only discarded with the user's consent.
    if len(cp.sections()) > 0:
        if not yn_prompt('The list of repositories is already set up. Do you '
                         'wish to clear this list? You will then be asked to '
                         'set up a new list, if you wish to do so (Y/n)\n'):
            return

    # Start from a blank config.
    cp.clear()
    collected = []

    if not yn_prompt('Do you wish to set up the list of repos? (Y/n)\n'):
        return

    # Gather repositories until the user stops.
    while True:
        candidate = get_repo(collected)
        if candidate is not None:
            collected.append(candidate)
            print('Successfully added repository.')

        if not yn_prompt('Do you want to add another repo? (Y/n)\n'):
            break

    # Write one config section per repository.
    for idx, r in enumerate(collected):
        sect = 'repo_' + str(idx)
        cp.add_section(sect)
        cp[sect]['repo_name'] = r.repo_name
        cp[sect]['repo_page'] = r.repo_page
        cp[sect]['monitor_repo'] = str(r.monitor_repo)
Ejemplo n.º 17
0
class ConfigLoader:
    """Thin wrapper around ConfigParser providing typed, defaulted reads."""

    def __init__(self) -> None:
        super().__init__()
        self.configParser = ConfigParser()

    def read(self, configurationPath: str):
        """Load (or merge) the configuration file at ``configurationPath``."""
        self.configParser.read(configurationPath)

    def clear(self):
        """Drop every loaded section."""
        self.configParser.clear()

    def get(self, section, item, default):
        """Return ``item`` from ``section`` coerced to ``default``'s type.

        Falls back to ``default`` on any configparser error (missing
        section/option, interpolation problems). Uses isinstance instead of
        ``type(x) == T``; the bool check must precede the int check because
        ``bool`` is a subclass of ``int``.
        """
        try:
            if isinstance(default, bool):
                return self.configParser.getboolean(section, item)
            elif isinstance(default, int):
                return self.configParser.getint(section, item)
            elif isinstance(default, float):
                return self.configParser.getfloat(section, item)
            else:
                return self.configParser.get(section, item)
        except Error:
            return default
Ejemplo n.º 18
0
def excel2csv():
    """Convert every .xlsx workbook under the configured srcdir to csv.

    Reads ``csvdir``/``srcdir`` from ./config.ini — a header-less ini file
    wrapped in a dummy section (an optional UTF-8 BOM is tolerated) — then
    walks srcdir and hands each workbook to process_excel.

    Raises
    ------
    Exception
        wrapper around any failure, chained to the original exception so
        the root-cause traceback is preserved.
    """
    try:
        config = StringIO()
        config.write('[dummysection]\n')
        with open('./config.ini', encoding="utf-8") as configfile:
            # skip an optional UTF-8 BOM at the start of the file
            if configfile.read(1) != u'\ufeff':
                configfile.seek(0)
            config.write(configfile.read())
            config.seek(0)
            cp = ConfigParser()
            cp.read_file(config)

            csvdir = cp.get('dummysection', 'csvdir')
            srcdir = cp.get('dummysection', 'srcdir')
            for i in cp.items('dummysection'):
                print(i[0], i[1])
            config.close()
            cp.clear()

            print("excel2csv start")
            if not os.path.exists(srcdir):
                os.makedirs(srcdir)

            for dirpath, dirnames, filenames in os.walk(srcdir):
                for filename in filenames:
                    pathfile = os.path.join(dirpath, filename)
                    # only Excel workbooks are converted
                    if filename.rfind(".xlsx") != -1:
                        process_excel(pathfile, csvdir)

            print("excel2csv end success")
    except Exception as e:
        print("excel2csv fail")
        # FIX: chain the original exception ('from e') so its traceback is
        # preserved instead of being discarded by re-wrapping.
        raise Exception(e) from e
Ejemplo n.º 19
0
def create_schema():
    """
    Generate a new schema in the OBIA4RTM database.
    In case the schema already exists, nothing will happen.
    The schema to be created is taken from the obia4rtm_backend.cfg file.

    Parameters
    ----------
    None

    Returns
    -------
    status : integer
        zero if everything was OK
    """
    status = 0
    # connect to OBIA4RTM database
    con, cursor = connect_db()
    # open a logger
    logger = get_logger()
    logger.info('Trying to setup a new schema for the OBIA4RTM database')
    # read in the obia4rtm_backend information to get the name of the schema
    # therefore the obia4rtm_backend.cfg file must be read
    install_dir = os.path.dirname(OBIA4RTM.__file__)
    home_pointer = install_dir + os.sep + 'OBIA4RTM_HOME'
    if not os.path.isfile(home_pointer):
        logger.error('Cannot determine OBIA4RTM Home directory!')
        close_logger(logger)
        sys.exit(-1)
    with open(home_pointer, "r") as data:
        obia4rtm_home = data.read()
    backend_cfg = obia4rtm_home + os.sep + 'obia4rtm_backend.cfg'
    if not os.path.isfile(backend_cfg):
        logger.error(
            'Cannot read obia4rtm_backend.cfg from {}!'.format(obia4rtm_home))
        close_logger(logger)
        sys.exit(sys_exit_message)
    # now, the cfg information can be read in using the configParser class
    parser = ConfigParser()
    try:
        parser.read(backend_cfg)
    except MissingSectionHeaderError:
        logger.error(
            'The obia4rtm_backend.cfg does not fulfil the formal requirements!',
            exc_info=True)
        close_logger(logger)
        sys.exit(-1)
    # now get the name of the schema
    schema = parser.get('schema-setting', 'schema_obia4rtm')
    try:
        assert schema is not None and schema != ''
    except AssertionError:
        logger.error(
            'The version of your obia4rtm_backend.cfg file seems to be corrupt!',
            exc_info=True)
        close_logger(logger)
        sys.exit(sys_exit_message)
    # if the schema name is OK, the schema can be created
    # if the schema already exists in the current database, nothing will happen
    sql = 'CREATE SCHEMA IF NOT EXISTS {};'.format(schema)
    cursor.execute(sql)
    con.commit()
    # enable the PostGIS extension; when enabling fails it is most likely
    # because the extension is already enabled, so log and roll back
    sql = "CREATE EXTENSION PostGIS;"
    try:
        cursor.execute(sql)
        con.commit()
    except (ProgrammingError, DatabaseError):
        logger.info("PostGIS already enabled!")
        con.rollback()
    # enable the HSTORE extension
    sql = "CREATE EXTENSION HSTORE;"
    try:
        cursor.execute(sql)
        con.commit()
    except (ProgrammingError, DatabaseError):
        # CONSISTENCY FIX: an already-enabled extension is an expected
        # condition; log at info level like the PostGIS branch above
        # instead of error.
        logger.info("HSTORE already enabled!")
        con.rollback()

    logger.info(
        "Successfully created schema '{}' in current OBIA4RTM database!".
        format(schema))
    # after that the schema-specific tables are created that are required
    # in OBIA4RTM
    sql_home = install_dir + os.sep + 'SQL' + os.sep + 'Tables'
    # the tables s2_inversion_results, s2_lookuptable, s2_objects and
    # inversion_mapping must be created within the schema;
    # check if the tables already exist before trying to create them
    sql_scripts = [
        's2_lookuptable.sql', 's2_inversion_results.sql', 's2_objects.sql',
        'inversion_mapping.sql'
    ]
    # go through the config file to get the table-names
    # (same order as the scripts above; 'table_lookuptabe' is the actual
    # config key spelling)
    table_names = [
        parser.get('schema-setting', 'table_lookuptabe'),
        parser.get('schema-setting', 'table_inv_results'),
        parser.get('schema-setting', 'table_object_spectra'),
        parser.get('schema-setting', 'table_inv_mapping'),
    ]
    # the parser can be cleared now as all information is read
    parser.clear()
    # iterate over script/table pairs and create each table that is missing
    for script_name, table_name in zip(sql_scripts, table_names):
        sql_script = sql_home + os.sep + script_name
        # check if the table already exists
        exists = check_if_exists(schema, table_name, cursor)
        # if the table already exists log a warning and continue
        if exists:
            logger.warning(
                "Table '{0}' already exists in schema '{1}' - skipping".format(
                    table_name, schema))
            continue
        # else create the table
        # get the corresponding sql-statement and try to execute it
        sql_statement = create_sql_statement(sql_script, schema, table_name,
                                             logger)
        try:
            cursor.execute(sql_statement)
            con.commit()
        except (DatabaseError, ProgrammingError):
            logger.error("Creating table '{0}' in schema '{1}' failed!".format(
                table_name, schema),
                         exc_info=True)
            close_logger(logger)
            sys.exit(sys_exit_message)
        # log success
        logger.info("Successfully created table '{0}' in schema '{1}'".format(
            table_name, schema))
    # create the RMSE function required for inverting the spectra
    fun_home = install_dir + os.sep + 'SQL' + os.sep + 'Queries_Functions'
    rmse_fun = fun_home + os.sep + 'rmse_function.sql'
    sql_statement = create_function_statement(rmse_fun, logger)
    try:
        cursor.execute(sql_statement)
        con.commit()
    except (DatabaseError, ProgrammingError):
        logger.error("Creating function '{0}' failed!".format(rmse_fun),
                     exc_info=True)
        close_logger(logger)
        sys.exit(sys_exit_message)
    # after iterating, the db connection and the logger can be closed
    close_db_connection(con, cursor)
    close_logger(logger)
    return status
Ejemplo n.º 20
0
            try:
                pathToDir = Path(directory)

                if pathToDir.exists() and pathToDir.is_dir():
                    keybinds.append(
                        keyboard.add_hotkey(
                            shortcut,
                            lambda: system(f"{fileManager} {directory}")))
                elif CLIArgs.validate:
                    print(f"Bad directory path {directory}")
            except OSError:
                # This executing assumes that it is a *command* and not a directory
                # that needs to be opened.
                keybinds.append(
                    keyboard.add_hotkey(shortcut, system, args=(directory, )))
            except ValueError:
                pass

        time.sleep(float(keybindsParser["SETUP"]["rebindWaitTime"]))

        # Destroy all keybinds, so we dont end up with multiple bindings.
        for keybinding in keybinds:
            keyboard.remove_hotkey(keybinding)
            del keybinding

        keybinds = []
        keybindsParser.clear()

except KeyboardInterrupt:
    pass
Ejemplo n.º 21
0
class Config:
    """Per-project configuration backed by an INI file.

    The config file lives under ``~/.<project>`` on POSIX,
    ``%APPDATA%/<project>`` on Windows, or ``./appdata`` when running in
    development mode.  Options named ``passwd``/``password`` are encrypted
    on write and decrypted on read.
    """

    def __init__(self, config_path=None, data_path=None,
                 is_dev=None, project=None):
        # Development mode defaults to whatever the orange helper reports.
        if is_dev is None:
            self.is_dev = orange.is_dev()
        else:
            self.is_dev = is_dev
        # Default project name: the running script's basename, no extension.
        if project is None:
            self.project = os.path.splitext(
                os.path.basename(sys.argv[0]))[0]
        else:
            self.project = project
        self.os_name = os.name
        if self.os_name == 'posix':
            config_ext = '.conf'
            self.config_path = os.path.expanduser(
                "~/.%s" % (self.project))
        else:
            config_ext = '.ini'
            self.config_path = os.path.join(os.getenv("APPDATA"),
                                            self.project)
        if self.is_dev:
            self.config_path = os.path.abspath("appdata")
        else:
            if config_path:
                self.config_path = config_path
        # BUGFIX: the data_path argument used to be silently ignored;
        # honour it before falling back to config_path.
        if data_path:
            self.data_path = data_path
        if not hasattr(self, "data_path"):
            self.data_path = self.config_path
        self.config_file = os.path.join(self.data_path,
                                        self.project + config_ext)
        self.modified = False
        self.load_config()

    def load_config(self, files=None):
        """(Re)load configuration from *files* (default: the project file)."""
        if files is None:
            files = self.config_file
        if not hasattr(self, 'parser'):
            self.parser = ConfigParser()
        self.parser.clear()
        self.parser.read(files, encoding='utf8')
        self.modified = False
        # Remember which file save_config() writes back to: the single file,
        # or the last (highest-priority) entry of a list.
        if isinstance(files, str):
            self.cur_file = files
        else:
            self.cur_file = files[-1]

    @property
    def sections(self):
        """List of section names currently loaded."""
        return self.parser.sections()

    def get(self, section):
        """Return *section* as a dict (passwords decrypted), or None if absent."""
        if self.parser.has_section(section):
            d = {}
            for option, value in self.parser.items(section):
                if self.is_passwd(option):
                    value = decrypt(value)
                d[option] = value
            return d

    def update(self, section, data):
        """Merge *data* into *section*, encrypting password options."""
        self.modified = True
        if not self.parser.has_section(section):
            self.parser.add_section(section)
        for option, value in data.items():
            if self.is_passwd(option):
                value = encrypt(value)
            self.parser.set(section, option, str(value))

    def get_many(self, *sections):
        """Return {section: option-dict} for every requested section."""
        return {section: self.get(section) for section in sections}

    def update_many(self, datas):
        """Apply update() for every section in the {section: data} mapping."""
        for section, data in datas.items():
            self.update(section, data)

    def is_passwd(self, key):
        # Case-insensitive check for options that must be encrypted at rest.
        return key.lower() in ('passwd', 'password')

    def save_config(self):
        """Write the configuration back to disk if it was modified."""
        # NOTE(review): pathlib.Path has no .ensure(); this relies on a
        # project-specific Path (see the original ensure_path helper) —
        # confirm which Path is imported at the top of this file.
        Path(self.cur_file).parent.ensure()
        if self.modified:
            with open(self.cur_file, 'w', encoding='utf8') as fn:
                self.parser.write(fn)

    def init_logging(self):
        """Configure the root logger to log to <data_path>/<project>.log."""
        import logging
        file_name = os.path.join(self.data_path,
                                 self.project + '.log')
        level = 'DEBUG' if self.is_dev else 'WARN'
        default = {
            'filename': file_name,
            'level': level,
            'format': '%(asctime)s %(levelname)s\t%(message)s',
            'datefmt': '%Y-%m-%d %H:%M'}
        # BUGFIX: get() returns None when there is no [logging] section,
        # which used to crash dict.update().
        default.update(self.get('logging') or {})
        logging.basicConfig(**default)
Ejemplo n.º 22
0
def main():
    """Generate ``mknown_apps.hpp`` from the ``*.ini`` files next to this script.

    Usage: ``generator <output_dir>``.  Each ini section describes one known
    app and must define at least one of ``config``, ``cache`` or
    ``local_data``; duplicate app names across files are fatal.
    """
    if len(sys.argv) != 2:
        fatal('Usage: generator <output_dir>')
    if not os.path.isdir(sys.argv[1]):
        fatal('The argument must be a directory:', sys.argv[1])

    known_apps = {}
    config = ConfigParser()

    for path in glob.iglob(os.path.join(os.path.dirname(__file__), '*.ini')):
        config.read(path)
        for app in config.sections():
            # An app may only be described once across all ini files.
            if app in known_apps:
                fatal(
                    'Duplicate entry "{}": first occurrence was "{}" then "{}"'
                    .format(app, known_apps[app]['source'], path))
            app_dict = config[app]
            if not ('config' in app_dict or 'cache' in app_dict
                    or 'local_data' in app_dict):
                fatal(
                    '{}: none of config, cache or local_data is specified for "{}"'
                    .format(path, app))
            known_apps[app] = {
                'source': path,
                'config': to_stringlist(app_dict.get('config', fallback='')),
                'cache': to_stringlist(app_dict.get('cache', fallback='')),
                'local_data':
                    to_stringlist(app_dict.get('local_data', fallback=''))
            }
        # Reset the parser so this file's sections do not leak into the next.
        config.clear()

    # BUGFIX: the output file handle was opened without ever being closed;
    # a context manager guarantees the data is flushed even on error.
    with open(os.path.join(sys.argv[1], 'mknown_apps.hpp'), 'w') as outfile:
        outfile.write('''\
// Generated by "harbour-mashka/known_apps/generator.py" at {}.
// Do not edit manually!

#ifndef MKNOWN_APPS_H
#define MKNOWN_APPS_H

#include <QList>
#include <QRegularExpression>

struct KnownApp
{{
    QString name;
    QStringList config;
    QStringList cache;
    QStringList local_data;
}};

inline QList<KnownApp> knownApps()
{{
    return {{
'''.format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))

        exclude = []

        for name in sorted(known_apps.keys()):
            app = known_apps[name]
            outfile.write('''\
        {{
            QStringLiteral("{}"),
            {},
            {},
            {}
        }},
'''.format(name, to_initializer(app['config']), to_initializer(app['cache']),
           to_initializer(app['local_data'])))
            exclude += need_exclude(app['config'])
            exclude += need_exclude(app['cache'])
            exclude += need_exclude(app['local_data'])

        # Build the optional exclude regex from all patterns marked for it.
        regex = ''
        if len(exclude) > 0:
            regex = '(QStringLiteral("("\n               "{})"))'.format(
                '|"\n               "'.join(sorted(exclude)))

        outfile.write('''\
    }};
}}

inline QRegularExpression excludeDirs()
{{
    static QRegularExpression regex{};
    return regex;
}}

#endif // MKNOWN_APPS_H
'''.format(regex))
Ejemplo n.º 23
0
class Config:
    """Reads and writes the game's INI configuration (video + players)."""

    def __init__(self, config_file: str):
        self.config_file = config_file
        self.cp = None

    @classmethod
    def parse_keys(cls, keys: str) -> Keys:
        """Turn a comma-separated string of key codes into a set of ints."""
        return set(map(int, keys.split(',')))

    @classmethod
    def keys_to_str(cls, keys: Keys) -> str:
        """Serialise a set of key codes as a comma-separated string."""
        return ','.join(map(str, keys))

    def read(self):
        """Load settings and players from ``self.config_file``."""
        self.cp = ConfigParser()
        self.cp.read(self.config_file)

        # Video settings
        video = self.cp['video']
        yes_values = ('true', 'yes')
        self.full_screen = video['full_screen'].lower() in yes_values
        self.frames_per_second = int(video['frames_per_second'])
        size = int(video['window_size'])
        self.window_size = size
        self.height_px = size
        self.width_px = size
        self.font = video['font']
        self.draw_fps = video['draw_fps'].lower() in yes_values

        if self.window_size not in VALID_WINDOW_SIZES:
            raise Exception('Invalid window size; Valid sizes: {}'
                            ''.format(VALID_WINDOW_SIZES))

        # Players: one [player_N] section per player.
        ctrl_options = ('ctrl_up', 'ctrl_down', 'ctrl_left', 'ctrl_right',
                        'ctrl_pause', 'ctrl_boost', 'ctrl_shoot')
        self.players = []
        for section_name in [s for s in self.cp if s.startswith('player_')]:
            section = self.cp[section_name]
            controls = [self.parse_keys(section[option])
                        for option in ctrl_options]
            self.players.append(Player(section['name'], *controls))

        # Union of every key any player uses.
        self.all_keys = set()
        for player in self.players:
            self.all_keys = self.all_keys.union(player.all_keys)

        if len(self.players) == 0:
            raise Exception('Invalid config: zero players')

    def write(self):
        """Persist the current settings back to ``self.config_file``."""
        if self.cp is None:
            raise Exception('Tried to save None config')
        elif len(self.players) == 0:
            raise Exception('Tried to save config with zero players')

        # Start from a blank parser so stale sections disappear.
        self.cp.clear()

        # Video settings
        self.cp.add_section('video')
        video = self.cp['video']
        video['full_screen'] = str(self.full_screen)
        video['frames_per_second'] = str(self.frames_per_second)
        video['window_size'] = str(self.window_size)
        video['font'] = self.font
        video['draw_fps'] = str(self.draw_fps)

        if self.window_size not in VALID_WINDOW_SIZES:
            raise Exception('Invalid window size; Valid sizes: {}'
                            ''.format(VALID_WINDOW_SIZES))

        # Players
        ctrl_options = ('ctrl_up', 'ctrl_down', 'ctrl_left', 'ctrl_right',
                        'ctrl_pause', 'ctrl_boost', 'ctrl_shoot')
        for index, player in enumerate(self.players):
            section_name = "player_" + str(index)
            self.cp.add_section(section_name)
            section = self.cp[section_name]
            section['name'] = player.name
            for option in ctrl_options:
                section[option] = self.keys_to_str(getattr(player, option))

        with open(self.config_file, 'w') as fp:
            self.cp.write(fp)

    def add_player(self, player: Player):
        """Register an extra player to be saved on the next write()."""
        self.players.append(player)
Ejemplo n.º 24
0
class TestProcessModule(unittest.TestCase):
    """Integration tests for the process module running against virtual
    (simulated) assets: a feeder, an energy-storage inverter and a grid
    intertie."""

    def setUp(self):
        """Build a fresh System with virtual assets and processes for each test."""
        self.test_system = gridpi_core.System(
        )  # Create System container object

        # configure asset models
        self.parser = ConfigParser()
        self.parser.read_dict({
            'FEEDER': {
                'class_name': 'VirtualFeeder',
                'name': 'feeder'
            },
            'ENERGY_STORAGE': {
                'class_name': 'VirtualEnergyStorage',
                'name': 'inverter'
            },
            'GRID_INTERTIE': {
                'class_name': 'VirtualGridIntertie',
                'name': 'grid'
            }
        })

        asset_factory = model_core.AssetFactory(
        )  # Create Asset Factory object
        for cfg in self.parser.sections(
        ):  # Add models to System, The asset factory acts on a configuration
            self.test_system.add_asset(asset_factory.factory(self.parser[cfg]))
        del asset_factory

        # configure processes (reusing the same parser after a reset)
        self.parser.clear()
        self.parser.read_dict({
            'process_1': {
                'class_name': 'EssUpdateStatus'
            },
            'process_2': {
                'class_name': 'GridUpdateStatus'
            },
            'process_3': {
                'class_name': 'EssSocPowerController',
                'inverter_target_soc': 0.6
            },
            'process_4': {
                'class_name': 'EssDemandLimitPowerController',
                'grid_kw_import_limit': 20,
                'grid_kw_export_limit': 20
            },
            'process_5': {
                'class_name': 'EssWriteControl'
            }
        })
        process_factory = process_core.ProcessFactory()
        for cfg in self.parser.sections():
            self.test_system.add_process(
                process_factory.factory(self.parser[cfg]))
        del process_factory

        # Order processes by their declared dependencies before running.
        self.test_system.process.sort()

        # Get an asyncio event loop so that we can run updateStatus() and updateCtrl() on assets.
        self.loop = asyncio.get_event_loop()

    def test_process_factory(self):
        """ To test if the process factory returns an object of the desired class
        """
        logging.debug(
            '********** Test process: test_process_factory **********')

        self.parser.clear()
        self.parser.read_dict({
            'test_process': {
                'class_name': 'EssSocPowerController',
                'some_special_attribute': 0.6
            }
        })

        PF = process_core.ProcessFactory()
        test_class = PF.factory(self.parser['test_process'])

        # The factory should instantiate the class named in 'class_name'.
        self.assertIsInstance(test_class,
                              process_plugins.EssSocPowerController)
        #self.assertEqual(test_class.config['some_special_attribute'], 0.6)

    def test_tag_aggregation(self):
        ''' Test the tag aggregation class constructor aggregates two classes with similar outputs

        '''
        logging.debug(
            '********** Test process: test_tag_aggregation **********')
        tag = 'inverter_kw_setpoint'

        inv_soc_pwr_ctrl_config = {
            "class_name": 'EssSocPowerController',
            "inverter_target_soc": 0.5,
            "target_inveter": 'inverter'
        }

        inv_dmdlmt_pwr_ctrl_config = {
            "class_name": 'EssDemandLimitPowerController',
            "grid_kw_import_limit": 10,
            "grid_kw_export_limit": 10,
            "target_inverter": 'inverter',
            "target_grid_intertie": 'grid'
        }

        inv_soc_pwr_ctrl = process_plugins.EssSocPowerController(
            inv_soc_pwr_ctrl_config)
        inv_dmdlmt_pwr_ctrl = process_plugins.EssDemandLimitPowerController(
            inv_dmdlmt_pwr_ctrl_config)

        process_list = [inv_soc_pwr_ctrl, inv_dmdlmt_pwr_ctrl]
        inv_pwr_ctrl_agg = process_plugins.AggregateProcessSummation(
            process_list)

        # Aggregate object is created
        self.assertIsInstance(inv_pwr_ctrl_agg, process_core.AggregateProcess)

        # Aggregate object composed of given objects
        self.assertIsInstance(inv_pwr_ctrl_agg._process_list[0],
                              process_plugins.EssSocPowerController)
        self.assertIsInstance(inv_pwr_ctrl_agg._process_list[1],
                              process_plugins.EssDemandLimitPowerController)

    def test_process(self):
        logging.debug('********** Test process: test_process **********')
        # NOTE(review): the string below follows a statement, so it is not a
        # real docstring; it is kept in place to leave the code unchanged.
        """ To test if data can be brought onto the tagbus from an asset, processed, and written back to the asset
        """

        # run updateStatus() twice. virtual components first update is an initializion state, then they begin to report.
        for x in range(2):
            tasks = asyncio.gather(
                *[x.update_status() for x in self.test_system.assets.assets])
            self.loop.run_until_complete(tasks)

            self.test_system.run_processes()

            tasks = asyncio.gather(
                *[x.update_control() for x in self.test_system.assets.assets])
            self.loop.run_until_complete(tasks)

        # After two cycles the virtual ESS should report a positive SoC.
        search_param = ('ess', 0, 'status', 'soc')
        resp = self.test_system.assets.get_asset(search_param[0])

        self.assertGreater(
            getattr(resp[0], search_param[2])[search_param[3]], 0.0)

    def test_GraphDependencies_sort(self):
        """Sorting the process graph by dependencies should not raise."""
        logging.debug(
            '********** Test process: test_graph_dependencies **********')
        self.test_system.process.sort()
Ejemplo n.º 25
0
def parse_arguments(args):
    """Merge the config file with command-line overrides and return the options.

    Command-line values (when given) are written into the parser, the merged
    configuration is persisted back to ``args.config``, and a plain ``opt``
    dict of the effective values is returned.  When resuming from an older
    run whose folder contains a config.ini, that file replaces the one given
    on the command line.
    """
    opt = {}

    print('=> Reading config file and command line arguments')
    config = ConfigParser()
    # NOTE(review): assigning the private _interpolation attribute swaps in
    # extended ${section:option} interpolation after construction — confirm
    # this stays valid across configparser versions.
    config._interpolation = configparser.ExtendedInterpolation()
    config.read(args.config)

    # figure out which model we're working with
    if args.option is not None:
        config.set('params', 'option', args.option)
    option = config.get('params', 'option')

    # handle the case of resuming an older model
    opt['resume_new_log_folder'] = False
    if args.resume:
        config.set(option, 'resume', args.resume)
    opt['resume'] = config.get(option, 'resume')
    if opt['resume']:
        if os.path.isfile(os.path.dirname(opt['resume']) + '/config.ini'):
            print('=> Resume flag set; switching to resumed config file')
            args.config = os.path.dirname(opt['resume']) + '/config.ini'
            config.clear()
            config._interpolation = configparser.ExtendedInterpolation()
            config.read(args.config)
        else:
            opt['resume_new_log_folder'] = True

    # command-line arguments (when present) override the config entries
    if args.gpu:
        config.set('params', 'gpu', args.gpu)
    if args.noise_param_idx:
        config.set('params', 'noise_param_idx', ' '.join(args.noise_param_idx))
    if args.logdir:
        config.set(option, 'logdir', args.logdir)
    if args.log_name:
        config.set(option, 'log_name', args.log_name)
    if args.batch_size:
        config.set(option, 'batch_size', str(args.batch_size))
    if args.workers:
        config.set(option, 'workers', str(args.workers))
    if args.epochs:
        config.set(option, 'epochs', str(args.epochs))
    if args.lambda_tv:
        config.set(option, 'lambda_tv', str(args.lambda_tv))
    if args.print_every:
        config.set(option, 'print_every', str(args.print_every))
    if args.save_every:
        config.set(option, 'save_every', str(args.save_every))
    if args.lr:
        config.set(option, 'lr', str(args.lr))
    if args.train_files:
        config.set(option, 'train_files', args.train_files)
    if args.val_files:
        config.set(option, 'val_files', args.val_files)

    # read all values from config file
    opt['lambda_tv'] = float(config.get(option, 'lambda_tv'))
    opt['intensity_scale'] = 1

    opt['gpu'] = config.get('params', 'gpu')
    opt['noise_param_idx'] = int(config.get('params', 'noise_param_idx'))
    opt['logdir'] = config.get(option, 'logdir')
    opt['log_name'] = config.get(option, 'log_name')
    opt['batch_size'] = int(config.get(option, 'batch_size'))
    opt['workers'] = int(config.get(option, 'workers'))
    opt['epochs'] = int(config.get(option, 'epochs'))
    opt['print_every'] = int(config.get(option, 'print_every'))
    opt['save_every'] = int(config.get(option, 'save_every'))
    opt['lr'] = float(config.get(option, 'lr'))
    opt['train_files'] = config.get(option, 'train_files')
    opt['val_files'] = config.get(option, 'val_files')
    opt['optimizer_init'] = config.get(option, 'optimizer')
    opt['model_name'] = config.get(option, 'model_name')
    opt['spad_datapath'] = config.get(option, 'spad_datapath')
    opt['mono_datapath'] = config.get(option, 'mono_datapath')
    # BUGFIX: the file was closed manually with no protection against
    # exceptions during write(); a context manager closes it reliably.
    with open(args.config, 'w') as cfgfile:
        config.write(cfgfile)

    return opt
Ejemplo n.º 26
0
class ConfigFile(object):
    '''Typed access to the application's settings file, including validation
    against the expected structure and write-back support.
    '''

    _CONFIG_FILE_NAME = "settings.cfg"

    # Default contents: one entry per known section class.
    _DEFAULT_CONFIG = {
        section.NAME_IN_FILE: section.DEFAULT
        for section in (DatabaseSection, CompanySection,
                        PurchaseOrderSection, LocaleSection)
    }

    def __init__(self):
        self._config_parser = ConfigParser()

    def load(self):
        """Load settings from disk, falling back to defaults if absent."""
        if not self.exists():
            # No settings file yet: start from the built-in defaults.
            self.set_to_defaults()
            return
        self._config_parser.read(self._CONFIG_FILE_NAME)
        self._validate_file_structure()
        self._load_sections()

    def set_to_defaults(self):
        """Reset the parser contents to the default configuration."""
        self._config_parser.clear()
        for name, defaults in self._DEFAULT_CONFIG.items():
            self._config_parser[name] = defaults
        self._validate_file_structure()
        self._load_sections()

    def exists(self):
        """Whether the settings file is present on disk."""
        return os.path.exists(self._CONFIG_FILE_NAME)

    def _validate_file_structure(self):
        # Every default section and every default option must be present.
        for name, defaults in self._DEFAULT_CONFIG.items():
            if not self._config_parser.has_section(name):
                raise AppConfigStructureError(
                    "The config file does not have section {}.".format(name))
            for option in defaults:
                if not self._config_parser.has_option(name, option):
                    raise AppConfigStructureError(
                        "The config file does not have option {} in section"
                        " {}.".format(option, name))

    def _load_sections(self):
        # Wrap each raw parser section in its typed accessor object.
        parser = self._config_parser
        self.database = DatabaseSection(parser[DatabaseSection.NAME_IN_FILE])
        self.company = CompanySection(parser[CompanySection.NAME_IN_FILE])
        self.purchaseorder = PurchaseOrderSection(
            parser[PurchaseOrderSection.NAME_IN_FILE])
        self.locale = LocaleSection(parser[LocaleSection.NAME_IN_FILE])

    def validate(self):
        """Validate the current values of every section."""
        for part in (self.database, self.company,
                     self.purchaseorder, self.locale):
            part.validate()

    def write(self):
        """Push the section objects back into the parser and save to disk."""
        parser = self._config_parser
        parser[DatabaseSection.NAME_IN_FILE] = self.database.get_dict()
        parser[CompanySection.NAME_IN_FILE] = self.company.get_dict()
        parser[PurchaseOrderSection.NAME_IN_FILE] = \
            self.purchaseorder.get_dict()
        parser[LocaleSection.NAME_IN_FILE] = self.locale.get_dict()
        with open(self._CONFIG_FILE_NAME, "w") as app_config:
            parser.write(app_config)
0
class ConfigParserEx:
    """配置文件基类"""
    def __init__(self):
        """构造器"""
        self._parser = ConfigParser()
        self._SUPPORT_TYPE = {
            r"int": self._get_int,
            r"str": self._get_str,
            r"bool": self._get_bool,
            r"json": self._get_json,
            r"float": self._get_float,
            r"file": self._get_file,
            r"file_str": self._get_file_str,
            r"file_bytes": self._get_file_bytes
        }

    def read(self, path: str, model: dict) -> dict:
        """读取配置文件"""
        self._parser.clear()
        self._parser.read(path, r"utf-8")

        return self._init_options(model)

    def read_string(self, string: str, model: dict) -> dict:
        """从字符串中读取配置信息"""
        self._parser.clear()
        self._parser.read_string(string)

        return self._init_options(model)

    def _init_options(self, model: dict) -> None:
        """初始化配置文件"""
        result = {}
        for section in model:
            result[section] = {
                key: self._SUPPORT_TYPE.get(value, self._get_str)(section, key)
                for key, value in model[section].items()
            }

        return result

    def _get_int(self, section: str, option: str) -> int:
        """获得配置文件中指定节下的指定配置项的值,并以 ``int`` 形式返回。"""
        return self._parser.getint(section, option)

    def _get_str(self, section: str, option: str) -> str:
        """获得配置文件中指定节下的指定配置项的值,并以 ``str`` 形式返回。"""
        return self._parser.get(section, option) or None

    def _get_bool(self, section: str, option: str) -> bool:
        """获得配置文件中指定节下的指定配置项的值,并以 ``bool`` 形式返回。"""
        return self._parser.getboolean(section, option)

    def _get_json(self, section: str, option: str) -> dict:
        """获得配置文件中指定节下的指定配置项的值,并进行Json反序列化后返回。"""
        return loads(self._get_str(section, option))

    def _get_float(self, section: str, option: str) -> float:
        """获得配置文件中指定节下的指定配置项的值,并以 ``float`` 形式返回。"""
        return self._parser.getfloat(section, option)

    def _get_file(self, section: str, option: str) -> BytesIO:
        """获得配置文件中指定节下的指定配置项的值,以此值为路径读取对应文件并以 ``ByteIO`` 形式返回。"""
        path = self._get_str(section, option)
        if path is None:
            return None

        with open(path, r"rb") as file:
            return BytesIO(file.read())

    def _get_file_bytes(self, section: str, option: str) -> bytes:
        """获得配置文件中指定节下的指定配置项的值,以此值为路径读取对应文件并以 ``bytes`` 形式返回。"""
        path = self._get_str(section, option)
        if path is None:
            return None

        with open(path, r"rb") as file:
            return file.read()

    def _get_file_str(self,
                      section: str,
                      option: str,
                      encoding=r"utf-8") -> str:
        """获得配置文件中指定节下的指定配置项的值,以此值为路径读取对应文件并以 ``str`` 形式返回。"""
        path = self._get_str(section, option)
        if path is None:
            return None

        with open(path, r"r", encoding=encoding) as file:
            return file.read()
def setup_nodes(cp: ConfigParser, api_endpoint: str) -> None:
    """Interactively build the [node_i] sections of the nodes config.

    Prompts the user for each node, first verifying that the API server at
    *api_endpoint* is reachable, then writes one section per collected node
    into *cp*.  An existing node list is only cleared with user consent.
    """
    print('==== Nodes')
    print(
        'To produce alerts, the alerter needs something to monitor! The list '
        'of nodes to be included in the monitoring will now be set up. This '
        'includes validators, sentries, and whether these nodes can be used '
        'as data sources to monitor a node\'s state indirectly. You may '
        'include nodes from multiple substrate chains in any order. PANIC '
        'will group them up automatically. Node names must be unique!\n\n'
        'Note that you will be asked whether a node is an archive node or '
        'not. This is done because for archive monitoring (which includes '
        '(alerting for)/detecting slashing events), the alerter needs '
        'blockchain data from the past. You do not need any archive data '
        'source nodes to run PANIC, but for archive monitoring to be enabled '
        'for a chain you must have at least one for that chain.')

    # Check if list already set up
    if len(cp.sections()) > 0 and \
            not yn_prompt('The list of nodes is already set up. Do you wish to '
                          'clear this list? You will then be asked to set up a '
                          'new list of nodes, if you wish to do so (Y/n)\n'):
        return

    # Clear config and initialise new list
    cp.clear()
    nodes = []

    # Ask if they want to set it up
    if not yn_prompt('Do you wish to set up the list of nodes? (Y/n)\n'):
        return

    # Get node details and append them to the list of nodes
    while True:
        # Check that API is running by retrieving some data which will be used.
        polkadot_api_data_wrapper = PolkadotApiWrapper(DUMMY_LOGGER,
                                                       api_endpoint)
        # Retry the API probe until it succeeds or the user gives up.
        while True:
            try:
                web_sockets_connected_to_api = polkadot_api_data_wrapper. \
                    get_web_sockets_connected_to_an_api()
                break
            except Exception:
                if not yn_prompt(
                        'Could not connect with the API Server at '
                        '\'{}\'. Please make sure that the API Server '
                        'is running at the provided IP before '
                        'proceeding further. Do you want to try '
                        'again? (Y/n)\n'.format(api_endpoint)):
                    return
        # get_node returns None when the user aborts adding this node.
        node = get_node(nodes, polkadot_api_data_wrapper,
                        web_sockets_connected_to_api)
        if node is not None:
            nodes.append(node)
            if node.node_is_validator:
                print('Successfully added validator node.')
            else:
                print('Successfully added full node.')

        if not yn_prompt('Do you want to add another node? (Y/n)\n'):
            break

    # Add nodes to config: one [node_i] section per collected node.
    for i, node in enumerate(nodes):
        section = 'node_' + str(i)
        cp.add_section(section)
        cp[section]['node_name'] = node.node_name
        cp[section]['chain_name'] = node.chain_name
        cp[section]['node_ws_url'] = node.node_ws_url
        cp[section]['node_is_validator'] = str(node.node_is_validator)
        cp[section]['is_archive_node'] = str(node.is_archive_node)
        cp[section]['monitor_node'] = str(node.monitor_node)
        cp[section]['use_as_data_source'] = str(node.use_as_data_source)
        cp[section]['stash_account_address'] = node.stash_account_address
Ejemplo n.º 29
0
# GETTING INI FILES FROM A DIRECTORY-------------------------------------------------------
# NOTE(review): `path`, `config` and `description` are defined earlier in
# this script — confirm they are in scope here.
iniFiles = []  # Paths of every .ini file found in `path`
with os.scandir(path) as entries:
    for entry in entries:
        # BUGFIX: match the extension including the dot, so that files whose
        # names merely end in "ini" (e.g. "houdini") are not picked up.
        if entry.name.endswith('.ini'):
            iniFiles.append(path + '/' + entry.name)
#------------------------------------------------------------------------------------------

if len(iniFiles) == 0:  # Checking if the .ini files exist
    print('That directory does not contain any <.ini> type files.')
    sys.exit()

count = 0  # To keep a count of if anything was printed to user

# READING INI FILES & SECTIONS-----------------------------------------
for i in iniFiles:
    config.read(i)
    for j in config.sections():
        if config.has_option(j, description):
            print(j + ':', config[j][description])
            count += 1
    # Reset so sections from this file do not leak into the next one.
    config.clear()
#----------------------------------------------------------------------

if count == 0:  # If nothing was printed, let the user know
    print('No keys in any sections matched provided description.')
    sys.exit()
#----------------------------------------------------------------------
Ejemplo n.º 30
0
    def setUp(self):
        """Build a System populated with virtual assets and processes."""
        logging.basicConfig(format='%(levelname)s:%(message)s',
                            level=logging.DEBUG)  # configure logging
        self.gp = gridpi_core.System()

        # configure asset models
        parser = ConfigParser()
        parser.read_dict({
            'FEEDER': {
                'class_name': 'VirtualFeeder',
                'class_type': 'feeder',
                'name': 'feeder'
            },
            'ENERGY_STORAGE': {
                'class_name': 'VirtualEnergyStorage',
                'class_type': 'ess',
                'name': 'inverter'
            },
            'GRID_INTERTIE': {
                'class_name': 'VirtualGridIntertie',
                'class_type': 'grid',
                'name': 'grid'
            }
        })

        asset_factory = model_core.AssetFactory(
        )  # Create Asset Factory object
        for cfg in parser.sections(
        ):  # Add models to System, The asset factory acts on a configuration
            self.gp.add_asset(asset_factory.factory(parser[cfg]))
        del asset_factory

        # configure processes (same parser, reset between uses)
        parser.clear()
        parser.read_dict({
            'process_1': {
                'class_name': 'EssUpdateStatus'
            },
            'process_2': {
                'class_name': 'GridUpdateStatus'
            },
            'process_3': {
                'class_name': 'EssSocPowerController',
                'inverter_target_soc': 0.6
            },
            'process_4': {
                'class_name': 'EssDemandLimitPowerController',
                'grid_kw_import_limit': 20,
                'grid_kw_export_limit': 20
            },
            'process_5': {
                'class_name': 'EssWriteControl'
            }
        })
        process_factory = process_core.ProcessFactory()
        for cfg in parser.sections():
            self.gp.add_process(process_factory.factory(parser[cfg]))
        del process_factory

        # Done with the parser; release its contents.
        parser.clear()
        """
Ejemplo n.º 31
0
def start_monitor(config_path, metric_config_path):
    """Build and start the metric monitoring service.

    Reads the main config (forecast algorithm + database path) and the
    per-metric config, registers a Forecastor for every metric that is
    defined both in 'task/metric_task.py' and in the metric config file,
    then starts the Monitor loop.

    Args:
        config_path: path to the main config file; must contain
            'forecast' and 'database' sections.
        metric_config_path: path to the metric config file; each section
            name must match a function defined in task/metric_task.py.

    Returns:
        None. Logs an error and returns early on any config problem.
    """
    if not os.path.exists(config_path):
        logger.error(
            '{config_path} is not exist.'.format(config_path=config_path))
        return
    if not os.path.exists(metric_config_path):
        logger.error('{metric_config_path} is not exist.'.format(
            metric_config_path=metric_config_path))
        return

    config = ConfigParser()
    config.read(config_path)

    if not config.has_section('forecast') or not config.has_section(
            'database'):
        logger.error(
            "do not has 'forecast' or 'database' section in config file.")
        return

    # Resolve the forecasting algorithm, falling back to 'fbprophet' when
    # the option is absent or cannot be resolved.
    # NOTE(review): logger.warn is a deprecated alias of logger.warning in
    # the stdlib -- confirm this project logger is stdlib before renaming.
    if not config.has_option('forecast', 'forecast_alg'):
        logger.warn(
            "do not find 'forecast_alg' in forecast section, use default 'fbprophet'."
        )
        forecast_alg = forecast_algorithm('fbprophet')
    else:
        try:
            forecast_alg = forecast_algorithm(
                config.get('forecast', 'forecast_alg'))
        except Exception as e:
            logger.warn("{error}, use default method: 'fbprophet'.".format(
                error=str(e)))
            forecast_alg = forecast_algorithm('fbprophet')

    if not config.has_option('database', 'database_path'):
        logger.error("do not find 'database_path' in database section...")
        return
    database_path = os.path.realpath(config.get('database', 'database_path'))

    monitor_service = Monitor()

    # Re-use the same parser for the per-metric configuration.
    config.clear()
    config.read(metric_config_path)

    metric_task_from_py = get_funcs(metric_task)
    # Build the name sets once instead of reconstructing them per iteration.
    names_from_py = {item[0] for item in metric_task_from_py}
    names_from_config = set(config.sections())

    default_metric_parameter_values = {
        'forecast_interval': '120S',
        'forecast_period': '60S',
        'data_period': '60S'
    }

    for metric_name in names_from_config | names_from_py:
        # Configured but not implemented: nothing to run, skip.
        if metric_name not in names_from_py:
            logger.error(
                "{metric_name} is not defined in 'task/metric_task.py', abandon monitoring."
                .format(metric_name=metric_name))
            continue

        # Implemented but not configured: no parameters to use, skip.
        if metric_name not in names_from_config:
            logger.error(
                "{metric_name} has no config information in 'task/metric_config.conf', abandon monitoring."
                .format(metric_name=metric_name))
            continue

        kwargs = {}
        # At least one range bound ('minimum' or 'maximum') is mandatory.
        if not config.has_option(metric_name,
                                 'maximum') and not config.has_option(
                                     metric_name, 'minimum'):
            logger.error(
                "{metric_name} do not provide any range parameter ('minimum' or 'maximum'), skip monitor."
                .format(metric_name=metric_name))
            continue
        if config.has_option(metric_name, 'maximum'):
            kwargs['maximum'] = config.getfloat(metric_name, 'maximum')
        if config.has_option(metric_name, 'minimum'):
            kwargs['minimum'] = config.getfloat(metric_name, 'minimum')

        for parameter, default_value in default_metric_parameter_values.items(
        ):
            if not config.has_option(metric_name, parameter):
                logger.warn(
                    "{metric_name} do not provide {parameter}, use default value: {default_value}."
                    .format(parameter=parameter,
                            metric_name=metric_name,
                            default_value=default_value))
                value = default_value
            else:
                temp_value = config.get(metric_name, parameter)
                if parameter == 'data_period' and temp_value.isdigit():
                    # A bare integer data_period is taken as-is (no unit).
                    value = int(temp_value)
                else:
                    try:
                        # Expect '<number><unit>' where unit is one of
                        # W/D/H/M/S; a failed match raises AttributeError
                        # and lands in the except branch below.
                        value_number, value_unit = re.match(
                            r'(\d+)?([WDHMS])', temp_value).groups()
                        # The unit clauses are defensive: the regex already
                        # restricts value_unit to the accepted letters.
                        if value_number is None or value_unit is None or value_unit not in (
                                'S', 'M', 'H', 'D', 'W'):
                            logger.error(
                                "wrong value: {metric_name} - {parameter}, only support 'S(second)' 'M(minute)'"
                                "'H(hour)' 'D(day)' 'W(week)', not support '{unit}', use default value: {default_value}"
                                .format(metric_name=metric_name,
                                        unit=value_unit,
                                        parameter=parameter,
                                        default_value=default_value))
                            value = default_value
                        else:
                            value = temp_value
                    except Exception as e:
                        # BUG FIX: this message was previously logged
                        # without calling .format(), so the raw
                        # '{placeholder}' text appeared in the log and the
                        # exception detail was lost.
                        logger.error(
                            "{metric_name} - {parameter} error: {error}, use default value: {default_value}."
                            .format(metric_name=metric_name,
                                    parameter=parameter,
                                    error=str(e),
                                    default_value=default_value))
                        value = default_value

            kwargs[parameter] = value

        kwargs['forecast_alg'] = forecast_alg()
        kwargs['database_path'] = database_path
        kwargs['data_handler'] = DataHandler
        kwargs['metric_name'] = metric_name

        monitor_service.apply(Forecastor(**kwargs))

    monitor_service.start()
Ejemplo n.º 32
0
class ConfigFile(object):
    """Wrapper around the application's settings file.

    Loads, validates, and persists the configuration sections
    (database, company, purchase order, locale), falling back to the
    built-in defaults when no file exists on disk.
    """

    _CONFIG_FILE_NAME = "settings.cfg"

    # Maps each section's name-in-file to its default option dict.
    _DEFAULT_CONFIG = {
        DatabaseSection.NAME_IN_FILE: DatabaseSection.DEFAULT,
        CompanySection.NAME_IN_FILE: CompanySection.DEFAULT,
        PurchaseOrderSection.NAME_IN_FILE: PurchaseOrderSection.DEFAULT,
        LocaleSection.NAME_IN_FILE: LocaleSection.DEFAULT
        }

    def __init__(self):
        self._config_parser = ConfigParser()

    def load(self):
        """Read the config file if present, otherwise load defaults."""
        if not self.exists():
            # No file on disk -- populate the parser with defaults.
            self.set_to_defaults()
            return
        self._config_parser.read(self._CONFIG_FILE_NAME)
        self._validate_file_structure()
        self._load_sections()

    def set_to_defaults(self):
        """Reset the parser to the built-in default configuration."""
        self._config_parser.clear()
        for name, options in self._DEFAULT_CONFIG.items():
            self._config_parser[name] = options
        self._validate_file_structure()
        self._load_sections()

    def exists(self):
        """Return True when the settings file is present on disk."""
        return os.path.exists(self._CONFIG_FILE_NAME)

    def _validate_file_structure(self):
        # Every default section and every option within it must exist.
        for name, options in self._DEFAULT_CONFIG.items():
            if not self._config_parser.has_section(name):
                raise AppConfigStructureError(
                            ("The config file does not have "
                             "section {}.").format(name))
            for option in options:
                if not self._config_parser.has_option(name, option):
                    raise AppConfigStructureError(
                            ("The config file does not have "
                             "option {} in section {}.").format(option,
                                                                name))

    def _load_sections(self):
        # Wrap each raw parser section in its typed accessor object.
        parser = self._config_parser
        self.database = DatabaseSection(parser[DatabaseSection.NAME_IN_FILE])
        self.company = CompanySection(parser[CompanySection.NAME_IN_FILE])
        self.purchaseorder = PurchaseOrderSection(
            parser[PurchaseOrderSection.NAME_IN_FILE])
        self.locale = LocaleSection(parser[LocaleSection.NAME_IN_FILE])

    def validate(self):
        """Validate the contents of every loaded section."""
        for section in (self.database, self.company,
                        self.purchaseorder, self.locale):
            section.validate()

    def write(self):
        """Flush the current section values back to the settings file."""
        parser = self._config_parser
        parser[DatabaseSection.NAME_IN_FILE] = self.database.get_dict()
        parser[CompanySection.NAME_IN_FILE] = self.company.get_dict()
        parser[PurchaseOrderSection.NAME_IN_FILE] = \
            self.purchaseorder.get_dict()
        parser[LocaleSection.NAME_IN_FILE] = self.locale.get_dict()
        with open(self._CONFIG_FILE_NAME, "w") as app_config:
            parser.write(app_config)