Exemplo n.º 1
0
def dumps(instance: object, out: str):
    """Serialize *instance* into the requested configuration format.

    Args:
        instance: Object to convert into a HOCON config tree via __generate.
        out: Target format name, case-insensitive: "hocon", "yaml", "json"
            or "properties". A falsy or unrecognized value skips conversion.

    Returns:
        The formatted string for a recognized *out* value; otherwise the
        intermediate config tree produced by __generate.
    """
    conf = __generate(instance, "")

    if out:
        # Normalize once and dispatch via a table instead of re-calling
        # out.lower() for every candidate format.
        converters = {
            "hocon": HOCONConverter.to_hocon,
            "yaml": HOCONConverter.to_yaml,
            "json": HOCONConverter.to_json,
            "properties": HOCONConverter.to_properties,
        }
        converter = converters.get(out.lower())
        if converter is not None:
            return converter(conf)

    # Unknown/empty format: fall through to the raw config tree,
    # matching the original behavior.
    return conf
Exemplo n.º 2
0
    def to_dict(self, with_include=True):
        """Parse this object's HOCON string and return it as a dict.

        Args:
            with_include:
                If True, double-quote-escaped `include` statements are kept
                as plain strings under key HOCONSTRING_INCLUDE_KEY.
                If False, `include` statements are excluded entirely.
        """
        # Pick the source text depending on whether includes should survive.
        source = (
            self._hocon_str
            if with_include
            else self.get_contents(with_include=False)
        )
        # Round-trip HOCON -> JSON -> dict.
        parsed = ConfigFactory.parse_string(source)
        return json.loads(HOCONConverter.to_json(parsed))
Exemplo n.º 3
0
def load_config(cwd=None, debug=False):
    """
    Tries to find HOCON files named "iss4e.conf" using the paths returned by find_files().
    The found files are then parsed and merged together, so that a single configuration dict is returned.
    For details on HOCON syntax, see https://github.com/chimpler/pyhocon and https://github.com/typesafehub/config/

    Args:
        cwd: Directory to start the search from; defaults to the current
            working directory *at call time* (None means "use os.getcwd()").
        debug: If True, print the file list and the resolved config to stdout.

    Example configuration:
    - default config in home dir (~/iss4e.conf):
        datasources {
            influx {
                host = ${HOSTNAME}
                # also set your passwords (e.g. from env with ${MYSQL_PASSWD} here
            }
            mysql {
                host = localhost
            }
        }

    - local config in cwd (./iss4e.conf):
        webike {
            # use the generic information from ${datasources.influx} (should be defined in ~/iss4e.conf and contain
            # host, password, ...) and extend it to use the (non-generic) database "webike"
            influx = ${datasources.influx} {
                db = "webike"
            }
        }

    - merged config that will be returned:
        {
            "datasources": {
                "influx": {
                    "host": "SD959-LT"
                },
                "mysql": {
                    "host": "localhost"
                }
            },
            "webike": {
                "influx": {
                    "host": "SD959-LT", # copied from ~/iss4e.conf: datasources.influx
                    "db": "webike"
                }
            }
        }
    """
    # BUG FIX: the old signature used `cwd=os.getcwd()`, which froze the
    # working directory at import time. Resolve it at call time instead.
    if cwd is None:
        cwd = os.getcwd()

    # find "iss4e.conf" file in current working dir or parent directories
    files = find_files("iss4e.conf", cwd)
    # Parse lazily (resolve=False) so ${...} substitutions can reference
    # values from other files after the merge below.
    configs = [
        ConfigFactory.parse_file(file, required=False, resolve=False)
        for file in files if os.path.isfile(file)
    ]
    if debug:
        print("Config files:\n" +
              "\n".join(file + " [" +
                        ("not " if not os.path.isfile(file) else "") + "found]"
                        for file in files))
    # merge all levels of config
    config = ConfigTree(root=True)
    # Record the entry-point script name and absolute cwd for diagnostics.
    config.put(
        "__main__",
        os.path.basename(
            getattr(sys.modules['__main__'], "__file__", "__cli__")))
    config.put("__cwd__", os.path.abspath(cwd))
    # Later files take precedence: each parsed config is merged *over* the
    # accumulated tree.
    for c in configs:
        config = ConfigTree.merge_configs(c, config)
    # Resolve ${...} substitutions only after all files are merged.
    config = ConfigParser.resolve_substitutions(config)
    if debug:
        print("Loaded config:\n" + HOCONConverter.to_json(config))

    # if config contains a key "logging", use it to reconfigure python logging
    if "logging" in config:
        if debug:
            print("Reconfiguring logging from config")
        if config.get("capture_exceptions", True):
            sys.excepthook = log_uncaught_exception
        logging.captureWarnings(config.get("capture_warnings", True))
        logging.config.dictConfig(config["logging"].as_plain_ordered_dict())

    # check python version
    # iss4e lib is using some syntax features and functions which were only introduced in python 3.5
    rec_ver = tuple(config.get("min_py_version", [3, 5]))
    if sys.version_info < rec_ver:
        warnings.warn(
            "Using outdated python version {}, a version >= {} would be recommended for use with iss4e lib. "
            "Try using a newer python binary, e.g. by calling `python{}.{}` instead of the default `python`."
            .format(sys.version_info, rec_ver, rec_ver[0], rec_ver[1]))

    return config
Exemplo n.º 4
0
 def to_dict(self):
     """Parse the include-free contents and return them as a dict."""
     # HOCON -> JSON -> dict round trip.
     parsed = ConfigFactory.parse_string(self._contents_wo_include)
     as_json = HOCONConverter.to_json(parsed)
     return json.loads(as_json)
Exemplo n.º 5
0
def dumps(obj: Any, **kwargs) -> str:
    """Render *obj* as a JSON string, forwarding *kwargs* to the converter."""
    rendered = HOCONConverter.to_json(obj, **kwargs)
    return rendered
Exemplo n.º 6
0
 def parse_string(self, s):
     """Parse the HOCON text *s* and return it as a plain dict."""
     tree = ConfigFactory.parse_string(s)
     as_json = HOCONConverter.to_json(tree)
     return json.loads(as_json)
Exemplo n.º 7
0
 def parse_fp(self, fp):
     """Parse HOCON from *fp* and return it as a plain dict.

     NOTE(review): *fp* is handed to ConfigFactory.parse_file, which by its
     name suggests it expects a filename/path — confirm that a file object
     is actually accepted here.
     """
     tree = ConfigFactory.parse_file(fp)
     as_json = HOCONConverter.to_json(tree)
     return json.loads(as_json)
Exemplo n.º 8
0
def save_config(args: Args, cfg: ConfigTree):
    """Write *cfg* as JSON to <run_dir>/config.json."""
    target = args.run_dir / 'config.json'
    rendered = HOCONConverter.to_json(cfg)
    with open(target, 'w') as out_file:
        out_file.write(rendered)
Exemplo n.º 9
0
from pyhocon import ConfigFactory, HOCONConverter


# Parse the sample HOCON file into a config tree.
conf = ConfigFactory.parse_file('samples/animal.conf')

print("cat says :", conf.get('cat.garfield.say'))
print("dog says :", conf.get('dog.mutt.say'))

# dog.conf has `include "cat.conf"` and both files define a garfield.say key;
# per the include/merge order, the cat value ("meow") overrides the dog value.
print("After merge :", conf.get('dog.mutt.hates.garfield.say'))

# Dump the fully parsed tree as JSON to a file.
with open('samples/expanded_animal.json', "w") as output_json:
    output_json.writelines(HOCONConverter.to_json(conf))

# Alternatively, json.dumps could serialize the tree directly:
#   fd.write(json.dumps(confTree,indent=4))
Exemplo n.º 10
0
    def __get_backend_conf_str(self):
        """
        Initializes the following backend stanzas,
        which are defined in "backend" {} in a Cromwell's backend
        configuration file:
            1) local: local backend
            2) gc: Google Cloud backend (optional)
            3) aws: AWS backend (optional)
            4) slurm: SLURM (optional)
            5) sge: SGE (optional)
            6) pbs: PBS (optional)

        Also, initializes the following common non-"backend" stanzas:
            a) common: base stanzas
            b) mysql: connect to MySQL (optional)

        Then converts it to a HOCON string.

        Returns:
            str: the "include ..." header line(s) followed by the HOCON
            rendering of the merged backend configuration dict.
        """
        # init backend dict; each Caper* stanza below is merged into it in
        # order, so later merges can override earlier keys.
        backend_dict = {}

        # common stanza for backend conf file
        merge_dict(
            backend_dict,
            CaperBackendCommon(
                port=self._port,
                disable_call_caching=self._disable_call_caching,
                max_concurrent_workflows=self._max_concurrent_workflows))

        # local backend (always present; also the fallback default backend)
        merge_dict(
            backend_dict,
            CaperBackendLocal(out_dir=self._out_dir,
                              concurrent_job_limit=self._max_concurrent_tasks))
        # GC: only when both a GCP project and an output bucket are configured
        if self._gcp_prj is not None and self._out_gcs_bucket is not None:
            merge_dict(
                backend_dict,
                CaperBackendGCP(
                    gcp_prj=self._gcp_prj,
                    out_gcs_bucket=self._out_gcs_bucket,
                    concurrent_job_limit=self._max_concurrent_tasks))
        # AWS: requires batch ARN, region and an S3 output bucket
        if self._aws_batch_arn is not None and self._aws_region is not None \
                and self._out_s3_bucket is not None:
            merge_dict(
                backend_dict,
                CaperBackendAWS(
                    aws_batch_arn=self._aws_batch_arn,
                    aws_region=self._aws_region,
                    out_s3_bucket=self._out_s3_bucket,
                    concurrent_job_limit=self._max_concurrent_tasks))
        # SLURM
        merge_dict(
            backend_dict,
            CaperBackendSLURM(out_dir=self._out_dir,
                              partition=self._slurm_partition,
                              account=self._slurm_account,
                              extra_param=self._slurm_extra_param,
                              concurrent_job_limit=self._max_concurrent_tasks))
        # SGE
        merge_dict(
            backend_dict,
            CaperBackendSGE(out_dir=self._out_dir,
                            pe=self._sge_pe,
                            queue=self._sge_queue,
                            extra_param=self._sge_extra_param,
                            concurrent_job_limit=self._max_concurrent_tasks))

        # PBS
        merge_dict(
            backend_dict,
            CaperBackendPBS(out_dir=self._out_dir,
                            queue=self._pbs_queue,
                            extra_param=self._pbs_extra_param,
                            concurrent_job_limit=self._max_concurrent_tasks))

        # Database: an explicit --no-file-db wins over any configured
        # file-DB path; file_db=None disables the file-backed DB.
        if self._no_file_db is not None and self._no_file_db:
            file_db = None
        else:
            file_db = self._file_db
        merge_dict(
            backend_dict,
            CaperBackendDatabase(file_db=file_db,
                                 mysql_ip=self._mysql_db_ip,
                                 mysql_port=self._mysql_db_port,
                                 mysql_user=self._mysql_db_user,
                                 mysql_password=self._mysql_db_password,
                                 db_timeout=self._db_timeout))

        # set header for conf ("include ...")
        # Header lines are later concatenated verbatim, so the base header
        # must already end with a newline.
        assert (Caper.BACKEND_CONF_HEADER.endswith('\n'))
        lines_header = [Caper.BACKEND_CONF_HEADER]

        # override with user-specified backend.conf if exists
        if self._backend_file is not None:
            lines_wo_header = []

            with open(CaperURI(self._backend_file).get_local_file(),
                      'r') as fp:
                for line in fp.readlines():
                    # find header and exclude
                    # header-like lines are collected (deduplicated) into
                    # lines_header; everything else is kept for parsing
                    if re.findall(Caper.RE_PATTERN_BACKEND_CONF_HEADER, line):
                        if line not in lines_header:
                            lines_header.append(line)
                    else:
                        lines_wo_header.append(line)

            # parse HOCON to JSON to dict
            c = ConfigFactory.parse_string(''.join(lines_wo_header))
            j = HOCONConverter.to_json(c)
            d = json.loads(j)
            # apply to backend conf; user file overrides built-in stanzas
            merge_dict(backend_dict, d)

        # use default backend (local) if not specified
        if self._backend is not None:
            backend_dict['backend']['default'] = self._backend
        else:
            backend_dict['backend']['default'] = Caper.DEFAULT_BACKEND

        # dict to HOCON (excluding header)
        backend_hocon = ConfigFactory.from_dict(backend_dict)
        # write header to HOCON string
        backend_str = ''.join(lines_header)
        # convert HOCON to string
        backend_str += HOCONConverter.to_hocon(backend_hocon)

        return backend_str
Exemplo n.º 11
0
def loads(s: AnyStr, **kwargs: Any) -> Dict[str, Any]:
    """Parse the HOCON document *s* and return it as a plain dict."""
    parsed = ConfigFactory.parse_string(s, **kwargs)
    as_json = HOCONConverter.to_json(parsed)
    return json.loads(as_json)