def DeleteItemInDict(item, item_path, item_sep='.'):
  """Finds and deletes (potentially) nested value based on specified node_path.

  Args:
    item: Dict, Map like object to search.
    item_path: str, An item_sep separated path to nested item in map.
    item_sep: str, Path item separator, default is '.'.

  Raises:
    KeyError: If item_path not found or empty.
  """
  if not item_path:
    raise KeyError('Missing Path')
  parts = item_path.split(item_sep)
  parts.reverse()
  context = item
  while parts:
    part = parts.pop()
    # Check dict_like first so a non-mapping context yields KeyError instead
    # of a TypeError from the `in` test.
    if yaml.dict_like(context) and part in context:
      if not parts:
        # Full path exists: delete the leaf unconditionally. The previous
        # implementation only deleted truthy values, raising KeyError for
        # keys whose value was falsy (0, '', False, {}) even though the key
        # actually exists.
        del context[part]
      else:
        context = context.get(part)  # continue down the path
    else:
      raise KeyError('Path [{}] not found'.format(item_path))
def ForComplex(cls, value):
  """Generates the correct type of assertion for the given assertion value.

  If value is just a string, it will default to Equals. Otherwise, it expects
  value to be a dictionary with one of [equals, matches, is_none, in] and the
  corresponding value will be the assertion value.

  Args:
    value: The assertion value.

  Raises:
    ValueError: If the given value cannot be converted to an assertion.

  Returns:
    Assertion, The correct assertion type.
  """
  if not yaml.dict_like(value):
    # Bare scalars default to an equality assertion.
    return EqualsAssertion(value)
  # Dispatch on whichever assertion key the mapping carries, checked in the
  # same precedence order as before.
  for key, assertion_type in (('equals', EqualsAssertion),
                              ('matches', MatchesAssertion),
                              ('is_none', IsNoneAssertion),
                              ('in', InAssertion)):
    if key in value:
      return assertion_type(value[key])
  # This should never happen for things that pass schema validation.
  raise ValueError('Assertion type is invalid.')
def __init__(self, headers, payload, omit_fields):
  """Stores the headers, encoded payload, and fields to omit."""
  self._headers = headers
  # Dict-like payloads are serialized to a JSON string before encoding.
  if yaml.dict_like(payload):
    payload = json.dumps(payload)
  self._payload = http_encoding.Encode(payload or '')
  self._omit_fields = omit_fields
def StandardUpdateHook(self, actual):
  """Updates the backing data based on the correct actual value."""
  if actual is None and yaml.dict_like(self._data_dict):
    # A None actual means the field should be removed entirely; pop with a
    # default is a no-op when the field is already absent.
    self._data_dict.pop(self._field, None)
  else:
    self._data_dict[self._field] = actual
  return True
def LoadYamlFromPath(path):
  """Loads the YAML file at path, requiring a dict-like top-level value.

  Args:
    path: str, Path of the YAML file to load.

  Returns:
    The parsed, dict-like data.

  Raises:
    cloudbuild_exceptions.ParserError: If the file cannot be parsed as YAML
      or the top-level value is not dict-like.
  """
  try:
    data = yaml.load_path(path)
  except yaml.Error as e:
    # Surface the underlying YAML error through the cloudbuild parser error.
    raise cloudbuild_exceptions.ParserError(path, e.inner_error)
  if yaml.dict_like(data):
    return data
  raise cloudbuild_exceptions.ParserError(
      path, "Could not parse as a dictionary.")
def FindOrSetItemInDict(item, item_path, item_sep='.', set_value=None):
  """Finds (potentially) nested value based on specified node_path.

  If set_value is passed will set the value at item_path, creating
  intermediate containers if needed.

  Args:
    item: Dict, Map like object to search.
    item_path: str, An item_sep separated path to nested item in map.
    item_sep: str, Path item separator, default is '.'.
    set_value: object, value to set at item_path. If path is not found create
      a new item at item_path with value of set_value. None means "find only",
      so None itself cannot be stored as a value.

  Returns:
    Object, data found in input item at item_path or None.

  Raises:
    KeyError: If item_path not found or empty.
  """
  if not item_path:
    raise KeyError(item_path)
  # Compare against None so falsy values (0, '', False, []) can be stored;
  # the previous truthiness test silently ignored them.
  setting = set_value is not None
  parts = item_path.split(item_sep)
  parts.reverse()
  context = item
  while parts:
    part = parts.pop()
    # Check dict_like first so a non-mapping context can't raise TypeError
    # from the `in` test.
    if yaml.dict_like(context) and part in context:
      if setting and not parts:  # at bottom of path with a value to set
        context[part] = set_value
      context = context.get(part)  # continue down the path
    elif setting and yaml.dict_like(context):
      # Upsert: create missing intermediate containers, then the leaf.
      if parts:
        context[part] = collections.OrderedDict()
      else:
        context[part] = set_value
      # Descend so the final value (not its parent dict) is returned; the
      # previous implementation returned the parent here, inconsistent with
      # the path-already-exists branch above.
      context = context.get(part)
    else:
      raise KeyError('Path [{}] not found'.format(item_path))
  return context
def Resolve(self, data, extracted_only=False, parent=None, field=None):
  """Recursively resolves references in the given data.

  Args:
    data: The data structure you want to recursively resolve references in.
    extracted_only: bool, If true, only extracted references will be resolved
      (not generated or defined references).
    parent: dict, The data structure of the parent of the current field being
      resolved (used for location information).
    field: str, The name of the current field being resolved (used for
      location information).

  Raises:
    UnknownReferenceError: If a reference cannot be resolved.

  Returns:
    The original data structure with references resolved.
  """
  if data is None:
    pass
  elif isinstance(data, six.string_types):
    # References are spelled $$name$$ inside string values.
    refs = re.findall(r'\$\$([-\w_]+)\$\$', data)
    for r in refs:
      # Lookup precedence: resource ids, then extracted ids, then declared
      # references.
      value = self._resource_ids.get(r)
      if value is None:
        value = self._extracted_ids.get(r)
      if value is None:
        value = self._references.get(r)
      if value is None:
        # NOTE: an unknown reference raises even when extracted_only is set;
        # the extracted_only skip below only applies to known references.
        raise UnknownReferenceError(
            'Unknown reference {}: [{}]'.format(
                assertions.FormatLocation(
                    updates.Location(parent, field)), r))
      if extracted_only and r not in self._extracted_ids:
        continue
      data = data.replace('$$' + r + '$$', value)
  # We intentionally replace the data in-place here so that the same data
  # structure can be written back out to the scenario file.
  elif yaml.list_like(data):
    for x in range(len(data)):
      data[x] = self.Resolve(data[x], extracted_only=extracted_only,
                             parent=data, field=None)
  elif yaml.dict_like(data):
    for k in data:
      data[k] = self.Resolve(data[k], extracted_only=extracted_only,
                             parent=data, field=k)
  # Things like ints, bools, etc. that don't need processing fall through.
  return data
def _CheckNode(self, context, field, node, expected):
  """Routes the comparison of node to the dict, list, or scalar checker."""
  if yaml.dict_like(expected):
    return self._CheckDictValue(context, field, node, expected)
  if yaml.list_like(expected):
    return self._CheckListValue(context, field, node, expected)
  # If not a list or Dict, do absolute comparison of the scalar value.
  if node == expected:
    return []
  return [
      Failure.ForDict(context, field, expected, node, key_as_path=False)
  ]
def _ReverseResolve(self, data, sorted_ids):
  """Recursively reverse resolves references in the given data.

  Args:
    data: The data structure to reverse resolve (mutated in place for
      lists and dicts).
    sorted_ids: Iterable of (reference, value) pairs; each occurrence of
      value in string data is replaced with $$reference$$.

  Returns:
    The data with known values replaced by $$reference$$ placeholders.
  """
  if data is None:
    pass
  elif isinstance(data, six.string_types):
    for reference, value in sorted_ids:
      data = data.replace(value, '$$' + reference + '$$')
  elif yaml.list_like(data):
    for x in range(len(data)):
      # Recurse through this method, forwarding sorted_ids. The previous
      # code called the differently-named self.ReverseResolve and dropped
      # the sorted_ids it had already been given.
      data[x] = self._ReverseResolve(data[x], sorted_ids)
  elif yaml.dict_like(data):
    for k in data:
      data[k] = self._ReverseResolve(data[k], sorted_ids)
  # Things like ints, bools, etc. that don't need processing fall through.
  return data
def testGetAuthProviders(self):
  """Checks provider objects, provider names, and the not-found cases."""
  # Provider objects: five entries, each dict-like.
  provider_objects = self.login_config.GetAuthProviders(name_only=False)
  self.assertEqual(len(provider_objects), 5)
  self.assertTrue(all(yaml.dict_like(p) for p in provider_objects))
  # Provider name strings.
  self.assertEqual(self.login_config.GetAuthProviders(),
                   ['basic', 'oidc1', 'oidc2', 'ldap1', 'ldap2'])
  # V1 configs expose no providers.
  self.assertIsNone(self.login_config_v1.GetAuthProviders())
  # After removing the providers key, lookup returns None.
  del self.login_config[file_parsers.LoginConfigObject.AUTH_PROVIDERS_KEY]
  self.assertIsNone(self.login_config.GetAuthProviders())
def _CheckDictValue(self, context, field, actual, expected):
  """Validate actual dict value against expected."""
  if not yaml.dict_like(actual):
    # Expected a mapping; anything else is an immediate type failure.
    return [
        Failure.ForDict(context, field, expected, actual,
                        msg='Expected type(dict).', key_as_path=False)
    ]
  sub_context = context.ForKey(field, key_as_path=False)
  failures = []
  for expected_key, expected_value in six.iteritems(expected):
    # Only keys present in expected are checked; extra actual keys pass.
    failures += self._CheckNode(
        sub_context, expected_key, actual.get(expected_key), expected_value)
  return failures
def _CheckNode(self, context, field, node, expected):
  """Compares node to expected, dispatching on expected's container type."""
  # TODO(b/78588819): There is a lot of duplication in here. Needs to be
  # completely refactored.
  if yaml.dict_like(expected):
    return self._CheckDictValue(context, field, node, expected)
  if yaml.list_like(expected):
    return self._CheckListValue(context, field, node, expected)
  # If not a list or Dict, do absolute comparison of the scalar value.
  failures = []
  if node != expected:
    failures.append(
        Failure.ForDict(context, field, expected, node, key_as_path=False))
  return failures
def _UpdateTypesForOutput(self, val):
  """Dig through a dict of list of primitives to help yaml output.

  Args:
    val: A dict, list, or primitive object.

  Returns:
    An updated version of val.
  """
  # Multi-line strings get wrapped so they render as YAML literal blocks.
  if isinstance(val, six.string_types) and '\n' in val:
    return YamlPrinter._LiteralLines(val)
  # Containers are updated in place and the same object is returned.
  if list_like(val):
    for index, element in enumerate(val):
      val[index] = self._UpdateTypesForOutput(element)
    return val
  if dict_like(val):
    for key in val:
      val[key] = self._UpdateTypesForOutput(val[key])
    return val
  # Other primitives pass through untouched.
  return val
def __call__(self, file_path):
  """Parses the YAML/JSON map file at file_path into a validated dict."""
  raw_map = yaml.load_path(file_path)
  if not yaml.dict_like(raw_map):
    raise arg_parsers.ArgumentTypeError(
        'Invalid YAML/JSON data in [{}], expected map-like data.'.
        format(file_path))
  result = {}
  for key, value in raw_map.items():
    # Apply the optional key/value converters, translating conversion
    # failures into argument errors.
    if self.key_type:
      try:
        key = self.key_type(key)
      except ValueError:
        raise arg_parsers.ArgumentTypeError(
            'Invalid key [{0}]'.format(key))
    if self.value_type:
      try:
        value = self.value_type(value)
      except ValueError:
        raise arg_parsers.ArgumentTypeError(
            'Invalid value [{0}]'.format(value))
    result[key] = value
  return result
def FromBackingData(cls, backing_data):
  """Create a response from the backing data."""
  payload_data = (backing_data['api_call'].get('return_response')
                  or collections.OrderedDict())
  status = payload_data.get('status')
  if not status:
    # httplib2-generated scenario tests keep the status inside the headers.
    status = int(payload_data.get('headers', {}).get('status', httplib.OK))
  headers = payload_data.get('headers', {}).copy()
  headers.pop('status', None)
  body = payload_data.get('body')
  # Dict-like bodies are serialized to a JSON string.
  if yaml.dict_like(body):
    body = json.dumps(body)
  response = Response(status, headers, body or '')
  return HTTPResponsePayload(response, payload_data.get('omit_fields'))
def IsResourceLike(item):
  """Return True if item is a dict like object or list of dict like objects.

  Note: an empty list counts as resource-like, since all() of an empty
  iterable is True.
  """
  if yaml.dict_like(item):
    return True
  return yaml.list_like(item) and all(yaml.dict_like(x) for x in item)