Example #1
def save_settings(flox, name, scope, profile, settings, remove=None):
    """Save new configuration settings to scoped file"""
    if remove is None:
        remove = []

    file_name = f"settings.{profile}.toml" if profile else "settings.toml"
    file_path = join(CONFIG_DIRS.get(scope, join(flox.working_dir, ".flox")),
                     file_name)

    if isfile(file_path):
        with open(file_path) as fh:
            current = fh.read()
    else:
        current = ""

    scoped_config = anyconfig.loads(current,
                                    ignore_missing=True,
                                    ac_parser="toml")
    section = scoped_config.get(name, {})
    section.update(settings)

    for r in remove:
        section.pop(r, None)

    scoped_config[name] = section

    os.makedirs(dirname(file_path), exist_ok=True)
    with open(file_path, "w+") as f:
        anyconfig.dump(scoped_config, f, ac_parser="toml")

    info(f"Configuration saved: {click.format_filename(file_path)}")
Example #2
    def write(self, custom_config: dict = None):
        # Note: this is only dealing with user config
        if not os.path.exists(self.config_path):
            # Create an empty config
            with open(self.config_path, "a"):
                os.utime(self.config_path, None)
        try:
            if not custom_config:
                backplane_config = anyconfig.loads(
                    json.dumps(self.toDict()), ac_parser="json"
                )
            else:
                # Only write user config, not the whole thing
                user_config = anyconfig.load([str(self.config_path)])
                anyconfig.merge(user_config, self.toDict(custom_config))
                backplane_config = user_config
            # anyconfig.merge(backplane_config, config)
            # if os.path.exists(config_path):

            # Open ~/.backplane/contexts/default/backplane.yml
            # Save config as yml

            with open(self.config_path, "w+") as writer:
                writer.write(anyconfig.dumps(backplane_config, ac_parser="yaml"))

            return backplane_config
        except OSError as e:
            raise ConfigNotFound(e)
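
Both branches ultimately use anyconfig as a format converter: serialize the in-memory config to JSON, re-parse it into plain containers with loads, then dump the result as YAML. A minimal sketch of that round trip (the dict below is invented; a YAML backend such as PyYAML is assumed):

import json
import anyconfig

config = {"name": "default", "docker": {"socket": "/var/run/docker.sock"}}

# dict -> JSON string -> parsed containers -> YAML string
parsed = anyconfig.loads(json.dumps(config), ac_parser="json")
print(anyconfig.dumps(parsed, ac_parser="yaml"))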
Example #3
    def load_config(self, config_file=None, config_file_str=None):
        if config_file is not None:
            self.config_dict = anyconfig.load(config_file, ac_parser="yaml")
        else:
            self.config_dict = anyconfig.loads(config_file_str, ac_parser="yaml")
        for k, v in self.config_dict.items():
            setattr(self, k, v)
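
The difference exercised above, in short: anyconfig.load takes a file path (or stream), while anyconfig.loads takes a string that has already been read. A small sketch (the file name and YAML content are illustrative):

import anyconfig

cfg_from_file = anyconfig.load("app.yml", ac_parser="yaml")          # from disk
cfg_from_str = anyconfig.loads("debug: true\nworkers: 4\n",
                               ac_parser="yaml")                      # from memory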
Example #4
    def test_24_find_firewall_policy_by_ipa__match_ip(self):
        self._arrange_uploaded_and_procecced_files()
        for hname in self.hostnames:
            upath = os.path.join(FIND_PREFIX, hname, "192.168.3.5")
            resp = self.client.get(upath)
            self.assert200(resp)

            res = anyconfig.loads(
                resp.data.decode("utf-8"),  # b -> u
                ac_parser="json")
            self.assertTrue(res)  # == [{"edit":, ...}]
Example #5
    def test_22_find_firewall_policy_by_ipa__not_found(self):
        self._arrange_uploaded_and_procecced_files()
        for hname in self.hosts:
            upath = os.path.join(FIND_PREFIX, hname, "127.0.0.1")
            resp = self.client.get(upath)
            self.assert200(resp)

            res = anyconfig.loads(
                resp.data.decode("utf-8"),  # b -> u
                ac_parser="json")
            self.assertFalse(res)  # == []
Example #6
    def test_34_find_firewall_policy_by_addr__contain_ipa(self):
        self._arrange_uploaded_and_procecced_files()
        for hname in self.hostnames:
            upath = os.path.join(FIND_PREFIX, hname, "192.168.2.2")
            resp = self.client.get(upath)
            self.assert200(resp)

            res = anyconfig.loads(
                resp.data.decode("utf-8"),  # b -> u
                ac_parser="json")
            self.assertTrue(res)
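
The assertTrue/assertFalse checks in these firewall-policy tests work because anyconfig.loads with ac_parser="json" returns ordinary Python containers: an empty JSON array parses to an empty (falsy) list, a non-empty one to a truthy list. A quick illustration:

import anyconfig

assert anyconfig.loads("[]", ac_parser="json") == []        # falsy  -> assertFalse passes
assert anyconfig.loads('[{"edit": 1}]', ac_parser="json")   # truthy -> assertTrue passes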
Example #7
    def test_20_find_networks_by_addr__found(self):
        ipa = "192.168.122.5"
        for upf in self.up_files:
            fname = os.path.basename(upf)
            upath = os.path.join(TT.API_PREFIX, "by_addr", fname, ipa)

            resp = self.client.get(upath)

            self.assert200(resp, _err_msg(resp, "path: " + upath,
                                          "ip: " + ipa))
            self.assertTrue(resp.data)
            data = anyconfig.loads(resp.data, ac_parser="json")
            self.assertEqual(data[0]["addrs"][0], "192.168.122.0/24", data[0])
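
Note that this test and the following ones pass resp.data (bytes) to anyconfig.loads without decoding; whether that works depends on the backend (json.loads accepts bytes on Python 3.6+). Decoding first, as the earlier tests do, is the safer pattern:

data = anyconfig.loads(resp.data.decode("utf-8"), ac_parser="json")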
Example #8
    def test_10_find_networks_by_addr__not_found(self):
        ipa = "127.0.0.1"
        for upf in self.up_files:
            fname = os.path.basename(upf)
            upath = os.path.join(TT.API_PREFIX, "by_addr", fname, ipa)

            resp = self.client.get(upath)

            self.assert200(resp, _err_msg(resp, "path: " + upath,
                                          "ip: " + ipa))
            self.assertTrue(resp.data)
            data = anyconfig.loads(resp.data, ac_parser="json")
            self.assertEqual(data, [], data)
Example #9
    def test_30_find_networks_by_path__not_found(self):
        (src, dst) = ("192.168.1.5", "127.0.0.1")

        for upf in self.up_files:
            fname = os.path.basename(upf)
            upath = os.path.join(TT.API_PREFIX, "by_path", fname, src, dst)

            resp = self.client.get(upath)
            self.assert200(
                resp,
                _err_msg(
                    resp, "path: {}, src: {}, "
                    "dst: {}".format(upath, src, dst)))
            self.assertTrue(resp.data)
            data = anyconfig.loads(resp.data, ac_parser="json")
            self.assertEqual(data, [])
Example #10
    def test_32_find_networks_by_path__found(self):
        (sip, dip) = ("192.168.122.10", "192.168.5.2")
        # (snt, dnt) = ("192.168.122.0/24", "192.168.5.0/24")

        for upf in self.up_files:
            fname = os.path.basename(upf)
            upath = os.path.join(TT.API_PREFIX, "by_path", fname, sip, dip)

            resp = self.client.get(upath)
            self.assert200(
                resp,
                _err_msg(
                    resp, "path: {}, src: {}, "
                    "dst: {}".format(upath, sip, dip)))
            self.assertTrue(resp.data)
            data = anyconfig.loads(resp.data, ac_parser="json")  # [[node]]
            self.assertNotEqual(data, [])
Example #11
        def __init__(self):
            self.config = anyconfig.loads(self.DEFAULT_CONFIG,
                                          ac_parser='yaml')

            file_name = os.environ.get('LOCUST_CONFIG')
            if not file_name or not os.path.isfile(file_name):
                raise Exception('invalid test configuration for locust, '
                                'check LOCUST_CONFIG environment variable.')
            anyconfig.merge(self.config,
                            anyconfig.load(file_name, ac_parser='yaml'))

            self.config['s3']['endpoint'] = os.getenv(
                'S3_ENDPOINT', self.config['s3']['endpoint'])
            self.config['s3']['endpoint'] = [
                u.strip() for u in self.config['s3']['endpoint'].split(',')
            ]
            self.config['s3']['access_key'] = os.getenv(
                'S3_ACCESS_KEY', self.config['s3']['access_key'])
            self.config['s3']['access_secret'] = os.getenv(
                'S3_ACCESS_SECRET', self.config['s3']['access_secret'])
            if 'cache' in self.config:
                self.config['cache']['server'] = os.getenv(
                    'LT_CACHE_SERVER', self.config['cache'].get('server'))
                self.config['cache']['port'] = os.getenv(
                    'LT_CACHE_SERV_PORT', self.config['cache'].get('port'))
                self.config['cache']['db'] = os.getenv(
                    'LT_CACHE_SERV_DB', self.config['cache'].get('db'))

            try:
                config_weights = self.config['data']['weights']
                for value in config_weights.values():
                    value['LOW'] = parse_size(str(value['LOW']))
                    value['HIGH'] = parse_size(str(value['HIGH']))
            except KeyError:
                pass

            try:
                self.config['ops']['put_object']['limit'][
                    'size_limit'] = parse_size(
                        str(self.config['ops']['put_object']['limit']
                            ['size_limit']))
            except KeyError:
                pass
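
The overall pattern is defaults-plus-overrides: parse a built-in YAML string for the defaults, then let anyconfig.merge fold a user-supplied file on top of them in place. A minimal sketch of just that step (the DEFAULT_CONFIG content and override values are invented):

import anyconfig

DEFAULT_CONFIG = """
s3:
  endpoint: http://127.0.0.1:9000
  access_key: ''
  access_secret: ''
"""

config = anyconfig.loads(DEFAULT_CONFIG, ac_parser="yaml")
overrides = anyconfig.loads("s3:\n  access_key: my-key\n", ac_parser="yaml")

# merge() updates its first argument in place; nested dicts are merged
# recursively by default, so untouched defaults such as the endpoint survive.
anyconfig.merge(config, overrides)
assert config["s3"]["access_key"] == "my-key"
assert config["s3"]["endpoint"] == "http://127.0.0.1:9000"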
Example #12
def writeConfig(config_path: str, config):
    try:
        backplane_config = anyconfig.loads(json.dumps(config),
                                           ac_parser="json")
        # anyconfig.merge(backplane_config, config)
        # if os.path.exists(config_path):

        # Open ~/.backplane/contexts/default/backplane.yml
        # Save config as yml
        with open(config_path, "w+") as writer:
            writer.write(anyconfig.dumps(backplane_config, ac_parser="yaml"))

        return backplane_config
    except OSError as e:
        typer.secho(
            f"Couldn't write backplane config at {config_path}: {e}",
            err=True,
            fg=typer.colors.RED,
        )
        sys.exit(1)
Example #13
def load_uixmlgz(repo, outdir, root=os.path.sep):
    """
    :param repo: Repo ID, e.g. rhel-7-server-rpms (RH CDN)
    :param outdir: Dir to save outputs
    :param root: Root dir in which the cache dir, e.g. /var/cache/dnf/, exists

    :return: A list of updates loaded from updateinfo.xml
    """
    LOG.debug("Loading updateinfo.xml.gz for %s [root=%s]", repo, root)
    uixmlgz = find_uixmlgz_path(repo, root=root)
    if uixmlgz is None:
        LOG.warn("Could not find updateinfo.xml.gz: repo=%s, root=%s",
                 repo, root)
        return False

    with gzip.open(uixmlgz) as inp:
        # FIXME: Does not work as expected: 'ParseError: not well-formed ...'
        # uidata = anyconfig.load(inp, ac_parser="xml")
        uidata = anyconfig.loads(inp.read(), ac_parser="xml",
                                 ac_parse_value=True, merge_attrs=True)

    if not uidata or "updates" not in uidata:
        LOG.error("Failed to load or parse updateinfo.xml: repo=%s, root=%s",
                  repo, root)
        return []

    if not uidata["updates"] or "update" not in uidata["updates"][0]:
        LOG.error("Failed to parse or empty updateinfo.xml: repo=%s, root=%s",
                  repo, root)
        return []

    if not os.path.exists(outdir):
        LOG.info("Creating dir to save results: %s", outdir)
        os.makedirs(outdir)
    elif not os.path.isdir(outdir):
        raise RuntimeError("Output dir '{}' is not a dir!".format(outdir))

    # Save parsed but not modified data.
    _save_data_as_json(uidata, os.path.join(outdir, repo, "updateinfo.json"))

    return uidata["updates"]
Example #14
def loads(file_name):
    _, extension = path_splitext(file_name)
    with open_file(file_name) as any_format_file:
        content = anyconfig.loads(any_format_file.read(),
                                  ac_parser=extension[1:])
    return content
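
Usage note: the parser is chosen from the file extension, so callers only pass a path (file names below are illustrative; the matching backends must be installed):

settings = loads("deploy.yaml")   # extension[1:] == "yaml" -> YAML backend
params = loads("params.json")     # extension[1:] == "json" -> JSON backend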
Example #15
def parse(x, *args, **kwargs):
    return discover(anyconfig.loads(x, *args, **kwargs))