def params_unprepare_from_saved(fields, copy_to_legacy=False):
    """
    Unescape all section and param names for hyper params and configuration.

    :param fields: task fields dict; modified in place
    :param copy_to_legacy: if set then also copy hyperparams and configuration
        data to the legacy locations (``execution/parameters`` and
        ``execution/model_desc``) for the old clients
    """
    for param_field in ("hyperparams", "configuration"):
        params = safe_get(fields, param_field)
        if params:
            # Unescape section names (outer keys) and, for dict-valued
            # sections, the param names (inner keys) as well
            unescaped_params = {
                ParameterKeyEscaper.unescape(key): {
                    ParameterKeyEscaper.unescape(k): v for k, v in value.items()
                }
                if isinstance(value, dict)
                else value
                for key, value in params.items()
            }
            dpath.set(fields, param_field, unescaped_params)

    if copy_to_legacy:
        # Plain string literals here (the originals were f-strings with no
        # placeholders, which is a no-op and misleading)
        for new_params_field, old_params_field, use_sections in (
            ("hyperparams", "execution/parameters", True),
            ("configuration", "execution/model_desc", False),
        ):
            legacy_params = _get_legacy_params(
                safe_get(fields, new_params_field), with_sections=use_sections
            )
            if legacy_params:
                dpath.new(
                    fields,
                    old_params_field,
                    {_get_full_param_name(p): p["value"] for p in legacy_params},
                )
def _assign_definition_value(self, var_name, var_value, var_assignments):
    """
    Substitute the evaluated value of ``var_name`` into every definition
    expression that references it, updating ``self.tf_definitions`` in place
    and recording the evaluation under ``self.definitions_context``.

    :param var_name: name of the variable being evaluated
    :param var_value: resolved value of the variable (original type preserved
        when the expression consists of the variable reference only)
    :param var_assignments: dict with ``var_file`` and per-file ``definitions``
    """
    assignment_regex = self._generate_var_evaluation_regex(var_name)
    var_file = var_assignments['var_file']
    var_value_string = str(var_value)
    for (assignment_file, assignments) in var_assignments['definitions'].items():
        # Save evaluation information in context
        for assignment_obj in assignments:
            definition_path = assignment_obj.get('definition_path')
            entry_expression = assignment_obj.get('definition_expression')
            definition_name = assignment_obj.get('definition_name')
            context_path, _ = self._extract_context_path(definition_path)
            if assignment_file in self.definitions_context.keys():
                dpath.new(self.definitions_context[assignment_file],
                          f'evaluations/{var_name}/var_file', var_file)
                dpath.new(self.definitions_context[assignment_file],
                          f'evaluations/{var_name}/value', var_value)
                dpath.new(self.definitions_context[assignment_file],
                          f'evaluations/{var_name}/definitions', assignments)
            if self._is_variable_only_expression(assignment_regex, entry_expression):
                # Preserve the original type of the variable if not part of a composite expression
                evaluated_definition = var_value
            else:
                # Use a callable replacement so that backslashes / group
                # references (e.g. "\1", "\g<0>") inside the value are inserted
                # verbatim instead of being interpreted by re.sub, which would
                # raise re.error or corrupt the evaluated definition
                evaluated_definition = re.sub(
                    assignment_regex, lambda _match: var_value_string, entry_expression)
            dpath.set(self.tf_definitions[assignment_file], definition_path,
                      evaluated_definition)
            self.logger.debug(
                f'Evaluated definition {definition_name} in file {assignment_file}: default value of variable {var_file}: '
                f'{var_name} to "{var_value_string}"')
def _assign_definition_value(self, var_name, var_value, var_assignments):
    # Substitute the evaluated value of `var_name` into every definition
    # expression that references it; updates self.tf_definitions in place
    # and records the evaluation under self.definitions_context.
    assignment_regex = self._generate_var_evaluation_regex(var_name)
    var_file = var_assignments['var_file']
    for (assignment_file, assignments) in var_assignments['definitions'].items():
        # Save evaluation information in context
        for assignment_obj in assignments:
            definition_path = assignment_obj.get('definition_path')
            entry_expression = assignment_obj.get('definition_expression')
            definition_name = assignment_obj.get('definition_name')
            # NOTE(review): context_path is computed but never used below —
            # confirm whether the call is needed for its side effects
            context_path, _ = self._extract_context_path(definition_path)
            # NOTE(review): unlike sibling variants of this method, there is no
            # membership check on self.definitions_context before indexing —
            # confirm assignment_file is always present (else KeyError)
            dpath.new(self.definitions_context[assignment_file],
                      f'evaluations/{var_name}/var_file', var_file)
            dpath.new(self.definitions_context[assignment_file],
                      f'evaluations/{var_name}/value', var_value)
            dpath.new(self.definitions_context[assignment_file],
                      f'evaluations/{var_name}/definitions', assignments)
            # Stringify the value: every substitution result becomes a string
            evaluated_value = str(var_value)
            # NOTE(review): evaluated_value is passed as the re.sub replacement
            # unescaped — backslashes or group refs (e.g. "\1") in the value
            # would be interpreted by re.sub; verify values cannot contain them
            evaluated_definition = re.sub(assignment_regex, evaluated_value,
                                          entry_expression)
            dpath.set(self.tf_definitions[assignment_file], definition_path,
                      evaluated_definition)
            self.logger.debug(
                f'Evaluated definition {definition_name} in file {assignment_file}: default value of variable {var_file}: '
                f'{var_name} to "{evaluated_value}"')
def my_path(my_dictionary: dict = None, path=None, value=None):
    """
    Set ``value`` at ``path`` (slash-separated) inside ``my_dictionary``.

    When the last path component is a digit it is treated as a list index:
    the parent entry must be (or is created as) a list.

    :param my_dictionary: dict to update in place
    :param path: slash-separated path, e.g. ``"a/b/0"``
    :param value: value to store at the path
    :return: the updated dictionary
    :raises ValueError: if the parent of a digit component exists but is not a list
    """
    items = path.split('/')
    # If the last path component is a digit it addresses a list element
    if items[-1].isdigit():
        items.pop()
        parent_path = "/".join(items)
        # Check the element before the digit is already in the dictionary
        if dpath.search(my_dictionary, parent_path):
            # Is it a list?
            if isinstance(dpath.get(my_dictionary, parent_path), list):
                dpath.new(my_dictionary, path, value)
            else:
                # Report the parent value that was actually checked; the
                # original indexed my_dictionary[items[-2]], which raises
                # KeyError for nested paths and masks the intended error
                raise ValueError("The following entry can not be added: {}. "
                                 "This is not a list".format(
                                     dpath.get(my_dictionary, parent_path)))
        # Not in the dictionary
        else:
            # Create the parent list, then add the indexed element
            dpath.new(my_dictionary, parent_path, [])
            dpath.new(my_dictionary, path, value)
    else:
        if dpath.search(my_dictionary, path):
            # Set the new value
            dpath.set(my_dictionary, path, value)
        else:
            dpath.new(my_dictionary, path, value)
    return my_dictionary
def _expand_reference_fields(cls, doc_cls, result, fields):
    """Wrap each reference id found in ``result`` as ``{'id': <value>}``
    (an empty dict when the id is falsy), updating ``result`` in place."""
    for field_name in fields:
        matches = cls._search(doc_cls, result, field_name, only_values=False)
        if not matches:
            continue
        for dotted_path, ref_id in matches:
            replacement = {'id': ref_id} if ref_id else {}
            dpath.set(result, dotted_path, replacement, separator='.')
def params_prepare_for_save(fields: dict, previous_task: Task = None):
    """
    If legacy hyper params or configuration is passed then replace the corresponding
    section in the new structure
    Escape all the section and param names for hyper params and configuration
    to make it mongo safe
    """
    for old_params_field, new_params_field, default_section in (
        ("execution/parameters", "hyperparams", hyperparams_default_section),
        ("execution/model_desc", "configuration", None),
    ):
        legacy_params = safe_get(fields, old_params_field)
        if legacy_params is None:
            continue

        if (
            not safe_get(fields, new_params_field)
            and previous_task
            and previous_task[new_params_field]
        ):
            # The call does not set the new-style params but the previous task
            # has them: merge the previous data in so the legacy update/delete
            # below is applied on top of it
            previous_data = previous_task.to_proper_dict().get(new_params_field)
            removed = _remove_legacy_params(
                previous_data, with_sections=default_section is not None
            )
            if not legacy_params and not removed:
                # if we only need to delete legacy fields from the db
                # but they are not there then there is no point to proceed
                continue

            fields_update = {new_params_field: previous_data}
            # Previous data comes from the db escaped; unescape before merging
            params_unprepare_from_saved(fields_update)
            fields.update(fields_update)

        for full_name, value in legacy_params.items():
            section, name = split_param_name(full_name, default_section)
            # Drop falsy components (e.g. section=None for configuration)
            new_path = list(filter(None, (new_params_field, section, name)))
            new_param = dict(name=name, type=hyperparams_legacy_type, value=str(value))
            if section is not None:
                new_param["section"] = section
            dpath.new(fields, new_path, new_param)
        # The legacy field was converted into the new structure; remove it
        dpath.delete(fields, old_params_field)

    for param_field in ("hyperparams", "configuration"):
        params = safe_get(fields, param_field)
        if params:
            # Escape section names (outer keys) and, for dict-valued sections,
            # the param names (inner keys) to make them mongo-safe
            escaped_params = {
                ParameterKeyEscaper.escape(key): {
                    ParameterKeyEscaper.escape(k): v for k, v in value.items()
                }
                if isinstance(value, dict)
                else value
                for key, value in params.items()
            }
            dpath.set(fields, param_field, escaped_params)
def _assign_definition_value(self, definition_type, var_name, var_value, var_assignments):
    """
    assigns var_value to variable var_name in tf_definitions
    :param definition_type: the entity's block type
    :param var_name: variable name
    :param var_value: variable value
    :param var_assignments: variable assignments
    """
    assignment_regex = self._generate_evaluation_regex(
        definition_type, var_name)
    var_file = var_assignments['var_file']
    var_value_string = str(var_value)
    for (assignment_file, assignments) in var_assignments['definitions'].items():
        # Save evaluation information in context
        for assignment_obj in assignments:
            definition_path = assignment_obj.get('definition_path')
            entry_expression = assignment_obj.get('definition_expression')
            definition_name = assignment_obj.get('definition_name')
            if not isinstance(entry_expression, str):
                # Example of unsupported evaluation:
                # cidr_blocks = local.ip_ranges.ipv4Prefixes[*].prefix
                logging.info(
                    f'Ran into a complex evaluation which isn\'t supported yet, on {assignment_file}'
                )
                continue
            context_path, _ = self.extract_context_path(definition_path)
            if assignment_file in self.definitions_context.keys():
                dpath.new(self.definitions_context[assignment_file],
                          f'evaluations/{var_name}/var_file', var_file)
                dpath.new(self.definitions_context[assignment_file],
                          f'evaluations/{var_name}/value', var_value)
                dpath.new(self.definitions_context[assignment_file],
                          f'evaluations/{var_name}/definitions', assignments)
            if self._is_variable_only_expression(assignment_regex, entry_expression):
                # Preserve the original type of the variable if not part of a composite expression
                evaluated_definition = var_value
            else:
                # Use a callable replacement so the value is inserted verbatim.
                # The previous re.escape(var_value_string) escaped regex
                # *pattern* metacharacters, leaking backslashes (e.g. "." ->
                # "\.") into the evaluated definition text.
                evaluated_definition = re.sub(
                    assignment_regex, lambda _match: var_value_string, entry_expression)
            dpath.set(self.tf_definitions[assignment_file], definition_path,
                      evaluated_definition)
            self.logger.debug(
                f'Evaluated definition {definition_name} in file {assignment_file}: default value of variable {var_file}: '
                f'{var_name} to "{var_value_string}"')
def set(self, key: str, value: Any) -> Any:
    """Update a single config entry and persist the change.

    Args:
        key (str): Key (in dot-notation) to update.
        value (Any): Value to set.

    Returns:
        Any: The config after syncing.
    """
    dpath.set(self._config, key, value)
    self.log.debug(f"set config value [{key}] => {value}")
    synced = self.sync()
    return synced
def unprepare_from_saved(call: APICall, tasks_data: Union[Sequence[dict], dict]):
    # Conform task data loaded from the db for returning to clients:
    # normalize output tags and undo the mongo-safe key escaping.
    if isinstance(tasks_data, dict):
        # Allow a single task dict as well as a sequence of them
        tasks_data = [tasks_data]

    conform_output_tags(call, tasks_data)

    for task_data in tasks_data:
        parameters = safe_get(task_data, "execution/parameters")
        if parameters is not None:
            # Unescape keys that were escaped on save to make them mongo-safe
            # (the original comment said "Escape", but this calls unescape)
            parameters = {
                ParameterKeyEscaper.unescape(k): v for k, v in parameters.items()
            }
            dpath.set(task_data, "execution/parameters", parameters)
def prepare_for_save(call: APICall, fields: dict, previous_task: Task = None):
    """Conform tags and hyper params, then normalize script fields.

    Returns the same ``fields`` dict, modified in place.
    """
    conform_tag_fields(call, fields, validate=True)
    params_prepare_for_save(fields, previous_task=previous_task)

    # Strip all script fields (remove leading and trailing whitespace chars) to avoid unusable names and paths
    for script_field in task_script_fields:
        field_path = f"script/{script_field}"
        try:
            current = dpath.get(fields, field_path)
        except KeyError:
            continue
        if isinstance(current, str):
            dpath.set(fields, field_path, current.strip())

    return fields
def prepare_create_fields(call: APICall, valid_fields=None, output=None, previous_task: Task = None):
    """
    Parse and conform task-creation fields from the call data.

    Moves ``output_dest`` into ``output.destination``, drops
    ``script/requirements``, de-duplicates tags, strips whitespace from
    script fields and parameter keys.

    :param call: the API call carrying the raw task data
    :param valid_fields: field schema to parse against (defaults to create_fields)
    :param output: optional existing Output object to update
    :param previous_task: unused here; kept for interface compatibility
    :return: the conformed fields dict
    """
    valid_fields = valid_fields if valid_fields is not None else create_fields
    # Copy before adding, otherwise the shared module-level task_fields set
    # would be mutated on every call
    t_fields = set(task_fields)
    t_fields.add("output_dest")

    fields = parse_from_call(call.data, valid_fields, t_fields)

    # Move output_dest to output.destination
    output_dest = fields.get("output_dest")
    if output_dest is not None:
        fields.pop("output_dest")
        if output:
            output.destination = output_dest
        else:
            output = Output(destination=output_dest)
        fields["output"] = output

    # requirements are not accepted on creation; drop silently if absent
    try:
        dpath.delete(fields, "script/requirements")
    except dpath.exceptions.PathNotFound:
        pass

    # Make sure there are no duplicate tags
    tags = fields.get("tags")
    if tags:
        fields["tags"] = list(set(tags))

    # Strip all script fields (remove leading and trailing whitespace chars) to avoid unusable names and paths
    for field in task_script_fields:
        try:
            path = "script/%s" % field
            value = dpath.get(fields, path)
            if isinstance(value, six.string_types):
                value = value.strip()
                dpath.set(fields, path, value)
        except KeyError:
            pass

    parameters = safe_get(fields, "execution/parameters")
    if parameters is not None:
        # Strip surrounding whitespace from parameter keys
        parameters = {k.strip(): v for k, v in parameters.items()}
        dpath.set(fields, "execution/parameters", parameters)

    return fields
def prepare_for_save(call: APICall, fields: dict, previous_task: Task = None):
    """Conform and normalize task fields before persisting.

    Applies tag/param/artifact conforming, backwards-compatibility fixups,
    dict-field escaping and script-field whitespace stripping; returns the
    same ``fields`` dict, modified in place.
    """
    conform_tag_fields(call, fields, validate=True)
    params_prepare_for_save(fields, previous_task=previous_task)
    artifacts_prepare_for_save(fields)
    ModelsBackwardsCompatibility.prepare_for_save(call, fields)
    DockerCmdBackwardsCompatibility.prepare_for_save(call, fields)
    for dict_path in dict_fields_paths:
        escape_dict_field(fields, dict_path)

    # Strip all script fields (remove leading and trailing whitespace chars) to avoid unusable names and paths
    for script_field in task_script_stripped_fields:
        field_path = f"script/{script_field}"
        try:
            current = dpath.get(fields, field_path)
        except KeyError:
            continue
        if isinstance(current, str):
            dpath.set(fields, field_path, current.strip())

    return fields
def prepare_for_save(call: APICall, fields: dict):
    """Conform tags, strip script fields, and escape parameter keys.

    Returns the same ``fields`` dict, modified in place.
    """
    conform_tag_fields(call, fields)

    # Strip all script fields (remove leading and trailing whitespace chars) to avoid unusable names and paths
    for script_field in task_script_fields:
        field_path = f"script/{script_field}"
        try:
            current = dpath.get(fields, field_path)
        except KeyError:
            continue
        if isinstance(current, str):
            dpath.set(fields, field_path, current.strip())

    parameters = safe_get(fields, "execution/parameters")
    if parameters is not None:
        # Escape keys to make them mongo-safe
        escaped = {ParameterKeyEscaper.escape(k): v for k, v in parameters.items()}
        dpath.set(fields, "execution/parameters", escaped)

    return fields
if isinstance(v, (_dict, _list)): # Recursive set. set_all_values(v, value, _dict=_dict, _list=_list) # Otherwise, dict_or_list[k] = value # Process if list. elif isinstance(dict_or_list, _list): for i, v in enumerate(dict_or_list): if isinstance(v, (_dict, _list)): # Recursive set. set_all_values(v, value, _dict=_dict, _list=_list) # Set value. dict_or_list[i] = value else: # Class past that is not in _dict or _list. raise ValueError("dict_or_list was %r, not %r or %r." % (type(dict_or_list, _dict, _list))) from pprint import pprint import copy #pprint(flatten(dpath.search(stats, "oprs"))) m = copy.deepcopy(match) dpath.set(m, "**", None, afilter=lambda x: False if isinstance(x, (dict, list)) else print(x) or True)
def execute_postponed_actions(context, is_data: bool = True, actions_switcher: dict = None):
    """
    Execute postponed actions
    :param actions_switcher: the automaton list of actions which can be postponed
    :param context: the current context
    :param is_data: boolean, True means set only data, False means execute actions (use models)
    :return: None
    """
    log.debug("postponed_action")
    if is_data:
        # Do settings
        log.debug("is data statement")
        # Only process keys that are both known attributes and actually postponed
        key_list = set(ATTRIBUTE_LIST).intersection(
            context.pre_conditions["postponed"].keys())
        for key in key_list:
            log.debug("Key : {}".format(str(key)))
            # Drain the postponed queue for this key; pop(0) keeps FIFO order
            while context.pre_conditions["postponed"][key]:
                log.debug("Context.pre_conditions[postponed][key]: {} ".format(
                    context.pre_conditions["postponed"][key]))
                elem = context.pre_conditions["postponed"][key].pop(0)
                log.debug("elem contains: {}".format(elem))
                try:
                    # Check the path exist
                    if dpath.search(context.pre_conditions[key], elem["path"]):
                        dpath.set(context.pre_conditions[key], elem["path"],
                                  elem["value"])
                    else:
                        # Create a new entry
                        dpath.new(context.pre_conditions[key], elem["path"],
                                  elem["value"])
                except AssertionError as assertion:
                    log.error("Update data failed.\n '{}'".format(
                        assertion.args[0]))
                    raise_exception(
                        AssertionError,
                        "Update data failed.\n '{}'".format(assertion.args[0]),
                        context.evidence_folder)
            log.debug("pre_conditions : {}".format(
                str(context.pre_conditions[key])))
    else:
        # Do execute steps
        log.debug("else statement")
        # Copy then clear, so actions postponed while executing are not lost
        postponed_actions = deepcopy(
            context.pre_conditions["postponed"]["execution"])
        context.pre_conditions["postponed"]["execution"].clear()
        for index, action in enumerate(postponed_actions):
            log.debug("Index: {}, action: {}".format(index, action))
            if isinstance(action, str):
                # A plain string is executed as step text
                context.execute_steps(action)
            elif isinstance(action, tuple) and len(action) == 2:
                log.debug("action switcher: {}".format(action))
                # (action_name, kwargs) pair dispatched through the switcher
                # NOTE(review): no guard for actions_switcher=None here —
                # confirm callers always pass it when executing tuple actions
                actions_switcher[action[0]](**action[1])
                sleep(0.5)
            else:
                raise Exception("Unknown action to process.\n"
                                "Get '{}'".format(repr(action)))
    log.debug("End of postponed_action ")