Example #1
0
 def test_replace_insert_parse_json_diff(self):
     """parse_json_diff yields plain ConfigTree values for both replace and insert markers."""
     for diff_op in (jsondiff.replace, jsondiff.insert):
         # Build a fresh tree per iteration so one parse cannot affect the next.
         tree = pyhocon.ConfigTree(
             [
                 ("classifier_dropout", 0.1),
                 ("classifier_hid_dim", 256),
                 ("max_vals", 8),
                 ("val_interval", 1),
             ]
         )
         parsed_diff = utils.parse_json_diff({"mrpc": {diff_op: tree}})
         assert isinstance(parsed_diff["mrpc"], pyhocon.ConfigTree)
Example #2
0
def generate_module_shards(original_file, nodes=1, shard_name='', replicas=None):
    """Generates module-shards.conf content.

    If shard_name and replicas parameters are filled, exactly one shard item is added to the content of original file.
    If more shards needed, then use this keyword more times with storing temp file after each addition and use it as
    <original_file> in the next step.

    Args:
        :param original_file: path to the filename, normally expecting file from system/org/opendaylight...
                          sal-clustering-config-<version>-moduleshardconf.xml
        :param nodes: number of nodes in the cluster
        :param shard_name: new name of the additional shard
        :param replicas: list of member indexes which should keep shard replicas
                         (default None: no additional shard is appended)

    Returns:
        :returns str: module-shards.conf content
    """
    # Avoid the shared-mutable-default pitfall; None means "no replicas given".
    replicas = [] if replicas is None else replicas
    conf = _parse_input(original_file)
    # Rewrite every existing shard's replica list for the requested cluster size.
    for module_shard in conf['module-shards']:
        module_shard["shards"][0]["replicas"] = [
            "member-{}".format(i + 1) for i in range(int(nodes))
        ]
    # Append the new shard only when both identifying pieces were supplied.
    if shard_name != '' and replicas != []:
        conf['module-shards'].append(
            pyhocon.ConfigTree([
                ("name", shard_name),
                ("shards", [
                    pyhocon.ConfigTree([
                        ("name", shard_name),
                        ("replicas", ["member-{}".format(i) for i in replicas])
                    ])
                ])
            ]))
    return pyhocon.tool.HOCONConverter.to_hocon(conf)
Example #3
0
    def parse(self, service_config):
        """Parse a service's config into defaults, doc text, definitions and actions.

        Raises ValueError when a definition references another definition
        that is not itself declared under "_definitions".
        """
        self._default = service_config.get(
            "_default", pyhocon.ConfigTree()).as_plain_ordered_dict()

        # Service doc line, optionally extended by a free-form description.
        self._doc = '{} service'.format(self.name)
        description = service_config.get('_description', '')
        if description:
            self._doc += '\n\n{}'.format(description)

        self._definitions = service_config.get(
            "_definitions", pyhocon.ConfigTree()).as_plain_ordered_dict()
        # Map each definition name to the set of schemas it references.
        self._definitions_refs = {}
        for def_name, def_schema in self._definitions.items():
            self._definitions_refs[def_name] = self._get_schema_references(def_schema)

        # Every referenced definition must itself be declared.
        all_refs = set()
        for refs in self.definitions_refs.values():
            all_refs.update(refs)
        if not all_refs.issubset(self.definitions):
            raise ValueError(
                "Unresolved references (%s) in %s/definitions" %
                (", ".join(all_refs.difference(self.definitions)), self.name))

        # Non-underscore keys are action definitions; convert them all first.
        actions = {}
        for key, versions in service_config.items():
            if not key.startswith("_"):
                actions[key] = versions.as_plain_ordered_dict()

        # Keep only actions that parse to a truthy result.
        self._actions = {}
        for action_name, action_versions in actions.items():
            parsed_action = self._parse_action_versions(action_name, action_versions)
            if parsed_action:
                self._actions[action_name] = parsed_action
Example #4
0
 def test_replace_insert_parse_json_diff(self):
     """sort_param_recursive orders ConfigTree entries alphabetically at every nesting level."""
     mrpc_tree = pyhocon.ConfigTree(
         [
             ("classifier_hid_dim", 256),
             ("max_vals", 8),
             ("classifier_dropout", 0.1),
             ("val_interval", 1),
         ]
     )
     rte_tree = pyhocon.ConfigTree([("b", 1), ("a", 3)])
     sorted_diff = utils.sort_param_recursive(
         {"mrpc": mrpc_tree, "rte": {"configs": rte_tree}}
     )
     expected_mrpc = [
         ("classifier_dropout", 0.1),
         ("classifier_hid_dim", 256),
         ("max_vals", 8),
         ("val_interval", 1),
     ]
     assert list(sorted_diff["mrpc"].items()) == expected_mrpc
     assert list(sorted_diff["rte"]["configs"].items()) == [("a", 3), ("b", 1)]
Example #5
0
def load_archive(archive_file: str, cuda_device: int = -1, overrides: str = "") -> Archive:
    """
    Instantiates an Archive from an archived `tar.gz` file.

    Parameters
    ----------
    archive_file: ``str``
        The archive file to load the model from.
    cuda_device: ``int``, optional (default = -1)
        If `cuda_device` is >= 0, the model will be loaded onto the
        corresponding GPU. Otherwise it will be loaded onto the CPU.
    overrides: ``str``, optional (default = "")
        HOCON overrides to apply to the unarchived ``Params`` object.

    Returns
    -------
    Archive
        The loaded model together with the config it was trained with.
    """
    # redirect to the cache, if necessary
    archive_file = cached_path(archive_file)

    # Extract archive to temp dir; the try/finally guarantees cleanup even
    # when extraction, config parsing, or model loading raises.
    tempdir = tempfile.mkdtemp()
    try:
        logger.info("extracting archive file %s to temp dir %s", archive_file, tempdir)
        # NOTE(review): extractall trusts member paths (tar path-traversal
        # risk) -- only open archives from trusted sources.
        with tarfile.open(archive_file, 'r:gz') as archive:
            archive.extractall(tempdir)

        # Check for supplemental files in archive
        fta_filename = os.path.join(tempdir, _FTA_NAME)
        if os.path.exists(fta_filename):
            with open(fta_filename, 'r') as fta_file:
                files_to_archive = json.loads(fta_file.read())

            # Add these replacements to overrides; only the keys are needed.
            replacement_hocon = pyhocon.ConfigTree(root=True)
            for key in files_to_archive:
                replacement_filename = os.path.join(tempdir, f"fta/{key}")
                replacement_hocon.put(key, replacement_filename)

            overrides_hocon = pyhocon.ConfigFactory.parse_string(overrides)
            combined_hocon = replacement_hocon.with_fallback(overrides_hocon)
            overrides = json.dumps(combined_hocon)

        # Load config
        config = Params.from_file(os.path.join(tempdir, _CONFIG_NAME), overrides)
        config.loading_from_archive = True

        # Instantiate model. Use a duplicate of the config, as it will get consumed.
        model = Model.load(config.duplicate(),
                           weights_file=os.path.join(tempdir, _WEIGHTS_NAME),
                           serialization_dir=tempdir,
                           cuda_device=cuda_device)
    finally:
        # Clean up temp dir; ignore_errors so a cleanup hiccup (e.g. a
        # lingering handle on Windows) cannot mask the real result/exception.
        shutil.rmtree(tempdir, ignore_errors=True)

    return Archive(model=model, config=config)
Example #6
0
def generate_modules(original_file, name='', namespace=''):
    """Generates modules.conf content.

    If name and namespace parameters are filled, exactly one module item is added to the content of original file.
    If more modules needed, then use this keyword more times with storing temp file after each addition and use
    it as <original_file> in the next step.

    Args:
        :param original_file: path to the filename, normally expecting file from system/org/opendaylight...
                          sal-clustering-config-<version>-moduleconf.xml
        :param name: name of the new, additional module
        :param namespace: namespace of the new, additional module

    Returns:
        :returns str: modules.conf content
    """
    conf = _parse_input(original_file)
    # Only append a new module entry when both identifying fields were supplied.
    if name != '' and namespace != '':
        conf['modules'].append(
            pyhocon.ConfigTree([("name", name), ("namespace", namespace),
                                ("shard-strategy", "module")]))
    return pyhocon.tool.HOCONConverter.to_hocon(conf)