Example #1
def test_make_splitter(normal_dict, delimiter, delimiter_equivalent):
    splitter = make_splitter(delimiter)
    flat_dict = flatten(normal_dict, delimiter_equivalent)
    unflattened_dict_using_make_splitter = unflatten(flat_dict,
                                                     splitter=splitter)
    unflattened_dict_using_equivalent_splitter = unflatten(
        flat_dict, splitter=delimiter_equivalent)
    assert unflattened_dict_using_make_splitter == unflattened_dict_using_equivalent_splitter
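
For context, a minimal sketch of the round trip this test exercises, assuming the flatten-dict package and its make_reducer/make_splitter helpers:

from flatten_dict import flatten, unflatten
from flatten_dict.reducers import make_reducer
from flatten_dict.splitters import make_splitter

nested = {'a': {'b': 1}}
# Reduce nested keys to '.'-joined strings, then split them back.
flat = flatten(nested, reducer=make_reducer(delimiter='.'))  # {'a.b': 1}
assert unflatten(flat, splitter=make_splitter(delimiter='.')) == nested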
Example #2
def load_from_config(find_dict,
                     prefix=None,
                     path=default_config_path,
                     allow_sub_keys=False):
    if not prefix:
        prefix = tuple()

    config = load_config(path=path)
    if not config:
        return {}

    found_config_values = {}
    flat_find_dict = flatten_dict.flatten(find_dict, keep_empty_types=(dict, ))
    flat_config = flatten_dict.flatten(config, keep_empty_types=(dict, ))
    for find_key, _ in flat_find_dict.items():
        for flat_key, flat_value in flat_config.items():
            prefixed_key = prefix + find_key
            intersection = tuple([
                v for i, v in enumerate(prefixed_key)
                if i < len(flat_key) and v == flat_key[i]
            ])
            difference = tuple([v for v in flat_key if v not in intersection])
            # HACK: only append differences at the current depth of the
            # config, i.e. don't allow appending sub-dicts
            if prefixed_key == intersection and (allow_sub_keys
                                                 or not difference):
                sub_key = find_key + difference
                found_config_values[sub_key] = flat_value
    return flatten_dict.unflatten(found_config_values)
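
The loader above relies on keep_empty_types so that empty sub-dicts survive flattening; a small illustration, assuming the flatten-dict package:

import flatten_dict

d = {'a': {}, 'b': {'c': 1}}
flatten_dict.flatten(d)                             # {('b', 'c'): 1} -- 'a' is dropped
flatten_dict.flatten(d, keep_empty_types=(dict, ))  # {('a',): {}, ('b', 'c'): 1}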
Example #3
    def __init__(self, configfile: str, fix_values, fix_missing, write=False):
        """
        Creates a config object from a json file
        :param configfile: the config filename (json)
        :param fix_values: a function to resolve values of the wrong type
        :param fix_missing: a function to resolve missing values
        :param write: writes changes (fix_values and fix_missing function results) to the file
        """
        parsed = load_config(configfile)
        flat = flatten(parsed, 'path')
        alloweds = flatten(internalconf.CONFIG_TYPES, 'path')
        buffer = dict()

        for key, _type in alloweds.items():
            if key not in flat or flat[key] is None:
                value = fix_missing(key)
            elif type(flat[key]) is not _type:
                value = fix_values(key)
            else:
                value = flat[key]

            buffer[key] = value

        if write:
            with open(config_path(configfile), 'w') as f:
                logging.info("Writing changes to the config file...")
                f.write(json.dumps(unflatten(buffer, 'path'), indent=4))

        # On Windows, flatten's 'path' reducer joins keys with '\\' instead of '/'. Fix that.
        self._dict = {k.replace('\\', '/'): v for k, v in buffer.items()}
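
For reference, the 'path' reducer joins keys with the OS path separator, which is why the constructor above normalizes backslashes; a minimal sketch, assuming the flatten-dict package:

from flatten_dict import flatten, unflatten

nested = {'a': {'b': 1}}
flat = flatten(nested, reducer='path')  # {'a/b': 1} on POSIX, {'a\\b': 1} on Windows
assert unflatten(flat, splitter='path') == nested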
Example #4
def test_unflatten_dict_underscore():
    flat_underscore_dict = {
        '_'.join(k): v
        for k, v in six.viewitems(flat_normal_dict)
    }
    assert unflatten(flat_underscore_dict,
                     splitter='underscore') == normal_dict
Example #5
def load_missing_action_kwargs(kwargs_configurations):
    flat_action_kwargs = {}
    for kwargs in kwargs_configurations:
        for group, args in kwargs.items():
            # Set group prefixes
            kwargs_path = (group, )
            if kwargs_path not in flat_action_kwargs:
                flat_action_kwargs[kwargs_path] = {}

            input_dict = find_value_in_dict(args, key="action_kwargs")
            required_fields = find_value_in_dict(args,
                                                 key="valid_action_config")
            missing_dict = missing_fields(input_dict, required_fields)
            action_kwargs_flat_path = get_dict_path(args,
                                                    key="action_kwargs",
                                                    truncate=True)

            if action_kwargs_flat_path:
                dict_flat_path = action_kwargs_flat_path[:-1]
                if kwargs_path != dict_flat_path:
                    kwargs_path = kwargs_path + dict_flat_path
                    if kwargs_path not in flat_action_kwargs:
                        flat_action_kwargs[kwargs_path] = {}

            # Fill in the provided arguments
            flat_action_kwargs[kwargs_path] = input_dict

            config_prefix = find_value_in_dict(args, key="config_prefix")
            # Update with missing arguments from config
            loaded_from_config = load_from_config(missing_dict,
                                                  prefix=config_prefix)
            flat_action_kwargs[kwargs_path].update(loaded_from_config)

    return flatten_dict.unflatten(flat_action_kwargs)
Example #6
def update(d: dict, path: str, value: Any):
    if not has(d, path):
        return d
    keys = tuple(path.split('.'))
    d_flat = flatten(d)
    d_flat[keys] = value
    return unflatten(d_flat)
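
A possible usage sketch for update, assuming has() tests whether a dotted path exists and that flatten/unflatten use the default tuple keys:

config = {'db': {'host': 'localhost', 'port': 5432}}
config = update(config, 'db.port', 5433)
# {'db': {'host': 'localhost', 'port': 5433}}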
Example #7
    def update(self, path, value):

        if not self.loaded:
            raise ConfigNotLoaded("Configuration file not loaded.")

        keys = path.split(':')

        flat = flatten(self.configuration)

        wlist = list(keys)

        # If a dictionary was passed in, every value it contains is
        # updated in the configuration.
        if not isinstance(value, dict):
            flat[tuple(wlist)] = value
        else:
            for key in value:
                listkey = wlist.copy()
                listkey.append(key)
                flat[tuple(listkey)] = value[key]

        self.configuration = unflatten(flat)
        self.modified = True
        self.save_config()
        return
Example #8
    def config_to_dict(self):
        """return the configuration as an unflattened dict"""
        server = self.server_config.create_mapping(
            self.server_config.default_config)
        dataset = self.default_dataset_config.create_mapping(
            self.default_dataset_config.default_config)
        external = self.external_config.create_mapping(
            self.external_config.default_config)
        config = dict(server={}, dataset={})
        for attrname in server.keys():
            config["server__" + attrname] = getattr(self.server_config,
                                                    attrname)
        for attrname in dataset.keys():
            config["dataset__" + attrname] = getattr(
                self.default_dataset_config, attrname)
        if self.dataroot_config:
            config["per_dataset_config"] = {}
        for dataroot_tag, dataroot_config in self.dataroot_config.items():
            dataset = dataroot_config.create_mapping(
                dataroot_config.default_config)
            for attrname in dataset.keys():
                config[f"per_dataset_config__{dataroot_tag}__" +
                       attrname] = getattr(dataroot_config, attrname)
        for attrname in external.keys():
            config["external__" + attrname] = getattr(self.external_config,
                                                      attrname)

        config = unflatten(config, splitter=lambda key: key.split("__"))
        return config
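
The lambda splitter above turns the double-underscore-joined keys back into nesting; a minimal illustration, assuming the flatten-dict package:

from flatten_dict import unflatten

flat = {'server__host': '0.0.0.0', 'server__port': 5005}
unflatten(flat, splitter=lambda key: key.split('__'))
# {'server': {'host': '0.0.0.0', 'port': 5005}}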
Example #9
    def update_data(file_path_to_write: str, file_path_to_read: str,
                    file_ending: str) -> None:
        """
        Collects special chosen fields from the file_path_to_read and writes them into the file_path_to_write.
        :param file_path_to_write: The output file path to add the special fields to.
        :param file_path_to_read: The input file path to read the special fields from.
        :param file_ending: The files ending
        :return: None
        """

        pack_obj_data, _ = get_dict_from_file(file_path_to_read)
        fields: list = DELETED_YML_FIELDS_BY_DEMISTO if file_ending == 'yml' else DELETED_JSON_FIELDS_BY_DEMISTO
        # Creates a nested-complex dict of all fields to be deleted by Demisto.
        # We need the dict to be nested, to easily merge it later to the file data.
        preserved_data: dict = unflatten(
            {
                field: dictor(pack_obj_data, field)
                for field in fields if dictor(pack_obj_data, field)
            },
            splitter='dot')

        if file_ending == 'yml':
            with open(file_path_to_write, 'r') as yf:
                file_yaml_object = yaml.load(yf)
            if pack_obj_data:
                merge(file_yaml_object, preserved_data)
            with open(file_path_to_write, 'w') as yf:
                yaml.dump(file_yaml_object, yf)

        elif file_ending == 'json':
            file_data: dict = get_json(file_path_to_write)
            if pack_obj_data:
                merge(file_data, preserved_data)
            with open(file_path_to_write, 'w') as jf:
                json.dump(obj=file_data, fp=jf, indent=4)
Example #10
def uncollate(tbl: dict) -> List[dict]:
    tbl = {k: list(v) for k, v in flatten(tbl).items()}
    return [
        unflatten({k: v[i].unsqueeze(0)
                   for k, v in tbl.items()})
        for i in range(len(next(iter(tbl.values()))))
    ]
Example #11
def delete(d: dict, path: str):
    if not has(d, path):
        return d
    keys = tuple(path.split('.'))
    d_flat = flatten(d)
    del d_flat[keys]
    return unflatten(d_flat)
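
A possible usage sketch, under the same has()/flatten/unflatten assumptions as the update helper above:

config = {'db': {'host': 'localhost', 'port': 5432}}
config = delete(config, 'db.port')
# {'db': {'host': 'localhost'}}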
Example #12
def prepare_kwargs_configurations(provider,
                                  args,
                                  argument_groups,
                                  strip_group_prefix=True):
    """ Used to load missing arguments from the configuration file """
    # Try to find all available args
    kwargs_configurations = []
    for group in argument_groups:
        group_kwargs_config = {}
        name = group.lower()
        # Flat group_kwargs_config to do direct indexing
        if "_" in name:
            prefix = tuple(name.split("_"))
            group_kwargs_config = create_sub_dictionaries(
                group_kwargs_config, prefix)
        else:
            prefix = (name, )
            group_kwargs_config[name] = {}

        prefix_action_kwargs = prefix + ("action_kwargs", )
        prefix_action_config = prefix + ("valid_action_config", )
        prefix_config_prefix = prefix + ("config_prefix", )

        flat_group_kwargs_config = flatten_dict.flatten(
            group_kwargs_config, keep_empty_types=(dict, ))
        # TODO, subname on split prefix
        action_kwargs = vars(extract_arguments(args, [group]))
        if action_kwargs:
            # remove claimed action_kwargs from args
            args = remove_arguments(args,
                                    action_kwargs.keys(),
                                    prefix=name + "_")
            flat_group_kwargs_config[prefix_action_kwargs] = action_kwargs
        else:
            flat_group_kwargs_config[prefix_action_kwargs] = {}

        if group in corc_config_groups:
            valid_action_config = corc_config_groups[group]
            # gen config prefix
            config_prefix = gen_config_prefix(prefix)
            flat_group_kwargs_config[
                prefix_action_config] = valid_action_config
            flat_group_kwargs_config[prefix_config_prefix] = config_prefix

        provider_groups = get_provider_config_groups(provider)
        if group in provider_groups:
            valid_action_config = provider_groups[group]
            prefix = (provider, ) + prefix
            config_prefix = gen_config_provider_prefix(prefix)
            flat_group_kwargs_config[
                prefix_action_config] = valid_action_config
            flat_group_kwargs_config[prefix_config_prefix] = config_prefix

        if (prefix_action_config in flat_group_kwargs_config
                and flat_group_kwargs_config[prefix_action_config]):
            unflat_group_group_kwargs_config = flatten_dict.unflatten(
                flat_group_kwargs_config)
            kwargs_configurations.append(unflat_group_group_kwargs_config)
    return kwargs_configurations
Example #13
    def undo(self) -> None:
        section: TableauSection = self.get_section()
        cell = self.get_cell()  # self.redo_command: str = "remove_one_column"

        prev = flatten(cell)
        content = flatten(self.content)
        to_update = unflatten({k: prev[k] for k in content})
        section.update_cell(self.y, self.x, to_update, self.cursor_avant)
Example #14
    def _get_public_claims(self):
        public_claims = {
            k.replace(JWT.config.public_claim_namespace, ""): v
            for k, v in self.raw_data.items()
            if k.startswith(JWT.config.public_claim_namespace)
            and k not in ("iss", "sub", "aud", "exp", "nbf", "iat", "jti")
        }

        return unflatten(public_claims, splitter="path")
Example #15
    def sample(self):
        if self._current_observation is None:
            self._current_observation = self.env.reset()

        policy_input = flatten_input_structure({
            key: self._current_observation[key][None, ...]
            for key in self.policy.observation_keys
        })
        action = self.policy.actions_np(policy_input)[0]

        next_observation, reward, terminal, info = self.env.step(action)
        self._path_length += 1
        self._path_return += reward
        self._total_samples += 1

        processed_sample = self._process_sample(
            observation=self._current_observation,
            action=action,
            reward=reward,
            terminal=terminal,
            next_observation=next_observation,
            info=info,
        )

        for key, value in flatten(processed_sample).items():
            self._current_path[key].append(value)

        if terminal or self._path_length >= self._max_path_length:
            last_path = unflatten({
                field_name: np.array(values)
                for field_name, values in self._current_path.items()
            })

            self.pool.add_path({
                key: value
                for key, value in last_path.items()
                if key != 'infos'
            })

            self._last_n_paths.appendleft(last_path)

            self._max_path_return = max(self._max_path_return,
                                        self._path_return)
            self._last_path_return = self._path_return

            self.policy.reset()
            self.pool.terminate_episode()
            self._current_observation = None
            self._path_length = 0
            self._path_return = 0
            self._current_path = defaultdict(list)

            self._n_episodes += 1
        else:
            self._current_observation = next_observation

        return next_observation, reward, terminal, info
Example #16
    def add_sample(self, sample):
        sample_flat = flatten(sample)
        samples_flat = type(sample)([
            (field_name_flat, np.array(sample_flat[field_name_flat])[None, ...])
            for field_name_flat in sample_flat.keys()
        ])
        samples = unflatten(samples_flat)

        self.add_samples(samples)
Example #17
def missing_fields(input_dict, required_fields, verbose=False, throw=False):
    missing = {}
    flat_input_fields = flatten_dict.flatten(input_dict)
    flat_required_fields = flatten_dict.flatten(required_fields)

    for k, v in flat_required_fields.items():
        # Not present or not set
        if not present_in(k, flat_input_fields) or not flat_input_fields[k]:
            missing[k] = None

    return flatten_dict.unflatten(missing)
Example #18
def REPLACE_FULL_OBSERVATION(original_batch, resampled_batch, where_resampled,
                             environment):
    batch_flat = flatten(original_batch)
    resampled_batch_flat = flatten(resampled_batch)
    goal_keys = [key for key in batch_flat.keys() if key[0] == 'goals']
    for key in goal_keys:
        assert (batch_flat[key][where_resampled].shape ==
                resampled_batch_flat[key].shape)
        batch_flat[key][where_resampled] = (resampled_batch_flat[key])

    return unflatten(batch_flat)
Example #19
def _update_config(current_config_json, value={}):
    changes = False
    current_config = current_config_json.copy()
    flatten_config = flatten(current_config)
    for k, v in flatten(value).items():
        if k in flatten_config:
            flatten_config[k] = v
    updated_config = unflatten(flatten_config)
    if flatten(current_config_json) != flatten_config:
        changes = True
    return updated_config, changes
Example #20
def generate_oci_config(overwrite_with_empty=False, **kwargs):
    config = default_oci_config
    if kwargs:
        flat = flatten_dict.flatten(config)
        other_flat = flatten_dict.flatten(kwargs)
        for k, v in other_flat.items():
            if not v and overwrite_with_empty:
                flat[k] = v
            if v:
                flat[k] = v
        config = flatten_dict.unflatten(flat)
    return config
Example #21
def _update_config(config, update):
    config = flatten(config)
    update = flatten(update)

    for opt, val2 in update.items():
        if isinstance(val2, list) and opt in config:
            val1 = config[opt]
            assert isinstance(val1, list)
            config[opt] = val1 + val2  # Append update for lists
        else:
            config[opt] = val2

    return unflatten(config)
Example #22
    def _relabel_batch(self, batch, indices, her_strategy):
        batch_size = indices.size
        batch['resampled_distances'] = np.full((batch_size, 1), np.inf)
        batch['resampled'] = np.zeros((batch_size, 1), dtype='bool')

        if her_strategy:
            her_strategy_type = self._her_strategy['type']
            goal_resampling_probability = self._her_strategy[
                'resampling_probability']

            to_resample_mask = (np.random.rand(batch_size) <
                                goal_resampling_probability)
            where_resampled = np.flatnonzero(to_resample_mask)
            to_resample_indices = indices[where_resampled]

            episode_first_distances = -1 * batch['episode_index_forwards'][
                where_resampled]
            episode_last_distances = batch['episode_index_backwards'][
                where_resampled]

            resampled_indices, resampled_distances = self._resample_indices(
                to_resample_indices, episode_first_distances,
                episode_last_distances, her_strategy_type)

            resampled_batch_flat = flatten(
                super(HindsightExperienceReplayPool,
                      self).batch_by_indices(indices=resampled_indices,
                                             field_name_filter=None))

            batch_flat = flatten(batch)
            goal_keys = [key for key in batch_flat.keys() if key[0] == 'goals']
            for key in goal_keys:
                assert (batch_flat[key][where_resampled].shape ==
                        resampled_batch_flat[key].shape)
                batch_flat[key][where_resampled] = (resampled_batch_flat[key])

            if self._reward_function:
                batch_flat[('rewards', )][where_resampled] = (
                    self._reward_function(resampled_batch_flat))
            if self._terminal_function:
                batch_flat[('terminals', )][where_resampled] = (
                    self._terminal_function(resampled_batch_flat))

            batch = unflatten(batch_flat)

            batch['resampled_distances'][where_resampled] = (
                resampled_distances)
            batch['resampled'][where_resampled] = True

        return batch
Example #23
    def load_manifest(self):
        containers_tuple = ('spec', 'template', 'spec', 'containers')
        init_containers_tuple = ('spec', 'template', 'spec', 'initContainers')
        self.logger.info(f"Loading manifest {self.generic_yaml.absolute()}")
        with open(self.generic_yaml, 'r') as stream:
            try:
                docs: List = list(yaml.safe_load_all(stream))
                cont_docs = containers(docs)
                for i in range(len(docs)):
                    doc = docs[i]
                    if doc is not None and doc in cont_docs:
                        self.logger.debug("Processing doc kind : {}".format(
                            doc["kind"]))
                        name = None
                        try:
                            doc_flat = flatten(doc)
                            for c in doc_flat[containers_tuple]:
                                name = c['name']
                                image = c["image"]
                                image_inter = self.interpolate_image(
                                    self.variables, image)
                                c['image'] = image_inter
                            try:
                                i_c = doc_flat[init_containers_tuple]
                                for c in i_c:
                                    image = c['image']
                                    image_inter = self.interpolate_image(
                                        self.variables, image)
                                    c['image'] = image_inter
                            except KeyError:
                                self.logger.error(
                                    f"initContainers not present for {name}")
                            doc = unflatten(doc_flat)
                            with open(
                                    Path(self.kustomize_builds_dir,
                                         name + '_inter.yaml'),
                                    'w') as outfile:
                                yaml.dump(doc,
                                          outfile,
                                          default_flow_style=False)
                        except KeyError:
                            self.logger.error("image is empty dict")
                    docs[i] = doc
                with open(self.interpolated_yaml, 'w') as outfile:
                    self.logger.info(
                        f"Writing {self.interpolated_yaml.resolve()}")
                    yaml.dump_all(docs, outfile, default_flow_style=False)
            except yaml.YAMLError as e:
                self.logger.error(e)
Example #24
def REPLACE_FLAT_OBSERVATION(original_batch,
                             resampled_batch,
                             where_resampled,
                             environment):
    batch_flat = flatten(original_batch)
    resampled_batch_flat = flatten(resampled_batch)
    observation_keys = [
        key for key in batch_flat.keys()
        if key[0] == 'observations' or key[0] == 'next_observations'
    ]
    for key in observation_keys:
        state_size = int(batch_flat[key].shape[1] / 2)
        # Index rows and columns in a single step: chained fancy indexing
        # (arr[rows][slice] = ...) would assign into a temporary copy.
        batch_flat[key][where_resampled, state_size:] = (
            resampled_batch_flat[key][:, state_size:])
    return unflatten(batch_flat)
Example #25
def set_in_config(set_dict, prefix=None, path=default_config_path):
    if not prefix:
        prefix = tuple()

    config = load_config(path=path)
    if not config:
        return False

    flat_set_dict = flatten_dict.flatten(set_dict, keep_empty_types=(dict, ))
    flat_config = flatten_dict.flatten(config, keep_empty_types=(dict, ))
    for set_key, set_value in flat_set_dict.items():
        flat_config[prefix + set_key] = set_value

    unflatten_dict = flatten_dict.unflatten(flat_config)
    return update_config(unflatten_dict, path=path)
Example #26
def generate_atom(pages):
    # Reformat pages dict to be a list of every unique page that has a date,
    # sorted by date and capped to 10 entries
    a = flatten(pages, reducer=tab_reducer)
    a = {
        k.replace('\t', '_',
                  k.count('\t') - 1): v
        for k, v in a.items() if k.count('\t') > 1
    }
    a = unflatten(a, splitter=tab_splitter)
    a = list(a.values())
    a = list(filter(lambda x: 'date' in x, a))
    a = sorted(a, key=lambda x: x['date'], reverse=True)
    a = a[:10]

    return render_template('atom.xml', entries=a, lastupdated=a[0]['date'])
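
tab_reducer and tab_splitter are project-local helpers; a plausible shape for them, following flatten-dict's callable reducer/splitter protocol (the reducer receives None as the parent key at the top level):

def tab_reducer(k1, k2):
    # Combine parent and child keys into one tab-joined flat key.
    return k2 if k1 is None else f"{k1}\t{k2}"

def tab_splitter(flat_key):
    # Invert the reducer: split a flat key back into a key tuple.
    return tuple(flat_key.split('\t'))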
Example #27
    def merge_toml_dict(self) -> JsonDict:
        """Merge all included styles into a TOML (actually JSON) dictionary."""
        merged_dict = unflatten(self._merged_styles,
                                custom_splitter(SEPARATOR_FLATTEN))
        # TODO: fix: check if the merged style file is still needed
        merged_style_path: Path = self.cache_dir / MERGED_STYLE_TOML
        toml = TomlDoc(obj=merged_dict)

        attempt = 1
        while attempt < 5:
            try:
                merged_style_path.write_text(toml.reformatted)
                break
            except OSError:
                attempt += 1

        return merged_dict
Example #28
def config_file_update():
    """
    Function to determine if we need to update our yaml configuration file after an upgrade.
    """
    log.debug('config_file_update() Started....')
    if os.path.isfile(skel_config_file):
        with open(config_file, 'r') as current_config:
            current_config = yaml.safe_load(current_config)
        with open(skel_config_file, 'r') as temp_config:
            temp_config = yaml.safe_load(temp_config)
        temp_current_config = flatten(current_config)
        temp_temp_config = flatten(temp_config)
        updates = (dict((k, v) for k, v in temp_temp_config.items()
                        if k not in temp_current_config))
        if updates != {}:
            copyfile(skel_config_file,
                     (str(Path.home()) +
                      '/.config/plot_manager/Config_Instructions.yaml'))
            copyfile(config_file, (
                str(Path.home()) +
                f'/.config/plot_manager/plot_manager.yaml.{current_military_time}'
            ))
            temp_current_config.update(updates)
            new_config = (dict((k, v) for k, v in temp_current_config.items()
                               if k in temp_temp_config))
        else:
            new_config = (dict((k, v) for k, v in temp_current_config.items()
                               if k not in temp_temp_config))
        if new_config != {}:
            new_config = (dict((k, v) for k, v in temp_current_config.items()
                               if k in temp_temp_config))
            current_config = unflatten(new_config)
            current_config.update({'configured': False})
            with open(
                (str(Path.home()) + '/.config/plot_manager/plot_manager.yaml'),
                    'w') as f:
                yaml.safe_dump(current_config, f)
            log.debug(
                f'Config File: {config_file} updated. Update as necessary to run this script.'
            )
            exit()
        else:
            log.debug('No config file changes necessary! No changes made.')
    else:
        log.debug('New configuration file not found. No changes made.')
Example #29
def update_shared_dict(shared_values_dict: Dict[str, Any], new_info_dict: Dict[str, Any]) -> None:
    """Update the dictionary and log any critical changes.

    Because this is a nested dictionary, make sure to flatten and then
    unflatten it to ensure full updates.
    """
    flattened_new_dict = flatten(new_info_dict)
    flattened_shared_dict = flatten(shared_values_dict)
    flattened_shared_dict.update(flattened_new_dict)
    updated_shared_dict = unflatten(flattened_shared_dict)
    shared_values_dict.update(updated_shared_dict)

    new_recording_directory: Optional[str] = attempt_to_get_recording_directory_from_new_dict(new_info_dict)

    if new_recording_directory is not None:
        scrubbed_recordings_dir = redact_sensitive_info_from_path(new_recording_directory)
        msg = f"Using directory for recording files: {scrubbed_recordings_dir}"
        logger.info(msg)
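
Why the flatten/unflatten round trip matters here, in a minimal sketch: a plain dict merge replaces whole sub-dicts instead of merging their leaves (assuming the flatten-dict package):

from flatten_dict import flatten, unflatten

shared = {'config': {'a': 1, 'b': 2}}
new = {'config': {'b': 3}}

{**shared, **new}         # {'config': {'b': 3}} -- 'a' is lost

flat = flatten(shared)
flat.update(flatten(new))
unflatten(flat)           # {'config': {'a': 1, 'b': 3}}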
Example #30
    def batch_by_indices(self, indices, field_name_filter=None):
        if np.any(indices % self._max_size > self.size):
            raise ValueError(
                "Tried to retrieve batch with indices greater than current"
                " size")

        field_names_flat = self.fields_flat.keys()
        if field_name_filter is not None:
            field_names_flat = self.filter_fields(field_names_flat,
                                                  field_name_filter)

        batch_flat = {
            field_name: self.data[field_name][indices]
            for field_name in field_names_flat
        }

        batch = unflatten(batch_flat)
        return batch