def _extract_tag(self):
    """Extracts `TextValue` of tag from the current event."""
    value = getattr(self._event, 'tag', None)
    if value is None or value == '!':
        return None
    tag = TextValue(value)
    tag.span = self._extract_property_span(self._event.start_mark, '!')
    return tag
def _extract_anchor(self):
    """Extracts `TextValue` of anchor from the current event."""
    value = getattr(self._event, 'anchor', None)
    if value is None or value in ['*', '&']:
        return None
    anchor = TextValue(value)
    symbol = '&'
    if isinstance(self._event, pyyaml.AliasEvent):
        symbol = '*'
    anchor.span = self._extract_property_span(self._event.start_mark, symbol)
    return anchor
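# A minimal sketch of the PyYAML event API the two extractors above rely on
# (assumption: `pyyaml` in this module is the standard `yaml` package imported
# under that name). Scalar events expose .tag, .anchor and .start_mark, which
# the extractors turn into `TextValue`s with spans.
def example_scalar_event_properties():
    import yaml
    for event in yaml.parse('!my_tag &my_anchor value'):
        if isinstance(event, yaml.ScalarEvent):
            # prints: !my_tag my_anchor 0
            print(event.tag, event.anchor, event.start_mark.line)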
def test_check_abstractrecord():
    type1 = dict(name='type1')
    type2 = dict(name='type2')
    type3 = dict(name='type3')
    input_type = dict(default_descendant=type1,
                      implementations={'type1': type1, 'type2': type2, 'type3': type3},
                      name='MyAbstract')
    input_type_no_default = dict(implementations={'type1': type1, 'type2': type2,
                                                  'type3': type3})
    node = MappingDataNode()
    node.type = TextValue()
    node.type.value = 'type2'
    assert checks.get_abstractrecord_type(node, input_type) == type2
    assert checks.get_abstractrecord_type(MappingDataNode(), input_type) == type1
    node.type.value = 'type3'
    assert checks.get_abstractrecord_type(node, input_type_no_default) == type3
    with pytest.raises(Notification) as excinfo:
        checks.get_abstractrecord_type(MappingDataNode(), input_type_no_default)
    assert excinfo.value.name == 'MissingAbstractType'
    node.type.value = 'invalid'
    with pytest.raises(Notification) as excinfo:
        checks.get_abstractrecord_type(node, input_type)
    assert excinfo.value.name == 'InvalidAbstractType'
def _create_record_key(self):
    """Creates `TextValue` of record key."""
    # check if key is scalar
    if not isinstance(self._event, pyyaml.ScalarEvent):
        start_pos = Position.from_mark(self._event.start_mark)
        self._create_fatal_error_node(start_pos)
        notification = Notification.from_name('ComplexRecordKey')
        notification.span = self._fatal_error_node.span
        self.notification_handler.report(notification)
        self._event = None
        self._iterate_events = False
        return None
    key = TextValue()
    key.value = self._event.value
    key.span = Span.from_event(self._event)
    return key
def test_expand_value_to_array():
    node = ScalarDataNode()
    node.value = 5
    node.parent = MappingDataNode()
    node.key = TextValue()
    node.key.value = 'path'
    node = ac.transposer._expand_value_to_array(node)
    node = ac.transposer._expand_value_to_array(node)
    node = ac.transposer._expand_value_to_array(node)
    assert node.get_node_at_path('/path/0/0/0').value == 5
def _create_fatal_error_node(self, start_pos):
    """
    Creates a non-existing node in the data tree to wrap the content
    of the error (span) in a node.
    """
    node = ScalarDataNode()
    end_pos = Position.from_document_end(self._document)
    node.span = Span(start_pos, end_pos)
    node.key = TextValue('fatal_error')
    node.origin = DataNode.Origin.error
    node.hidden = True
    self._fatal_error_node = node
def _shift_array(node, input_type):
    """If all children are scalars but an array is expected, duplicate them and shift them one level deeper."""
    if input_type['subtype']['base_type'] == 'Array' and \
            len(node.children) > 0 and Transposer._can_shift(node, input_type):
        children = node.children
        node.children = []
        array_node = SequenceDataNode(TextValue(str(0)), node)
        array_node.span = node.span
        array_node.children = deepcopy(children)
        node.set_child(array_node)
        return True
    return False
def _expand_value_to_array(node, value=None):
    """Expands node value to an array."""
    array_node = SequenceDataNode(node.key, node.parent)
    array_node.span = node.span
    node.parent = array_node
    node.key = TextValue('0')
    if node.input_type is not None:
        array_node.input_type = node.input_type
        node.input_type = array_node.input_type['subtype']
    if value is None:
        array_node.children.append(node)
    else:
        array_node.children.append(value)
    array_node.origin = DataNode.Origin.ac_array
    return array_node
def _validate_abstract(self, node, input_type):
    """Validates an AbstractRecord node."""
    try:
        concrete_type = checks.get_abstractrecord_type(node, input_type)
    except Notification as notification:
        if notification.name == 'InvalidAbstractType':
            notification.span = node.type.span
        else:
            notification.span = get_node_key(node).notification_span
        self._report_notification(notification)
    else:
        if node.type is None:
            # if default_descendant defines the Abstract type, add it to the data structure
            node.type = TextValue()
            node.type.value = concrete_type.get('name')
            node.type.span = Span(node.span.start, node.span.start)
        concrete_type['implemented_abstract_record'] = input_type
        node.input_type = concrete_type
        self._validate_record(node, concrete_type)
def _create_array_node(self):
    """Creates an array node."""
    node = SequenceDataNode()
    start_mark = self._event.start_mark
    end_mark = self._event.end_mark
    self._next_parse_event()
    while (self._event is not None and
           not isinstance(self._event, pyyaml.SequenceEndEvent)):
        key = TextValue(str(len(node.children)))
        child_node = self._create_node(node)
        self._next_parse_event()
        if child_node is None:  # i.e. unresolved alias
            continue
        child_node.key = key
        node.children.append(child_node)
    if self._event is not None:
        # update end_mark when array ends correctly
        end_mark = self._event.end_mark
    elif node.children:
        end_mark = node.children[-1].span.end
        end_mark.line -= 1
        end_mark.column -= 1
    node.span = Span.from_marks(start_mark, end_mark)
    return node
def try_expand_reducible(node, input_type):
    """Initializes a record from the reducible_to_key value."""
    if input_type is None or \
            input_type['base_type'] not in ['Record', 'Abstract'] or \
            node.implementation == DataNode.Implementation.mapping:
        return None
    if input_type['base_type'] == 'Record':
        child_type = input_type
    else:
        try:
            child_type = Transposer._get_it_concrete(node, input_type)
        except (KeyError, AttributeError):
            return None
    if child_type is None or 'reducible_to_key' not in child_type:
        return None
    key = child_type['reducible_to_key']
    if key is None:
        return None
    record_node = MappingDataNode(node.key, node.parent)
    record_node.span = node.span
    if hasattr(node, 'type'):
        record_node.type = node.type
        node.type = None
    node.parent = record_node
    node.origin = DataNode.Origin.ac_reducible_to_key
    node.key = TextValue(key)
    if node.input_type is not None:
        record_node.input_type = node.input_type
        node.input_type = child_type['keys'][key]['type']
    record_node.children.append(node)
    return record_node
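# Illustrative sketch only (not part of the test suite): it reuses the
# dict-based input_type format and the node constructors that appear in the
# tests in this section, and assumes `try_expand_reducible` is callable as the
# plain function defined above. A scalar stored where a Record with
# reducible_to_key='a' is expected gets wrapped into a record whose key 'a'
# holds the original scalar.
def example_try_expand_reducible():
    it_record = dict(base_type='Record',
                     keys={'a': {'type': dict(base_type='Integer')}},
                     name='MyRecord', reducible_to_key='a')
    scalar = ScalarDataNode(TextValue('path'), MappingDataNode(), 2)
    record = try_expand_reducible(scalar, it_record)
    # the original scalar is now the child 'a' of the new record node
    assert record.children[0].key.value == 'a'
    assert record.children[0].value == 2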
def test_autoconvert():
    it_record = dict(base_type='Record',
                     keys={'a': {'default': {'type': 'obligatory'},
                                 'type': dict(base_type='Integer')}},
                     name='MyRecord',
                     reducible_to_key='a')
    it_array = dict(base_type='Array',
                    subtype=dict(base_type='Array', subtype=it_record))
    input_type = dict(base_type='Record',
                      keys={'path': {'type': it_array}},
                      name='Root')
    root = MappingDataNode()
    node = ScalarDataNode(TextValue('path'), root, 2)
    root.children.append(node)
    converted = ac.autoconvert(root, input_type)
    assert converted.get_node_at_path('/path/0/0/a').value == 2
def make_transposition(cls, node, input_type, in_transposition):
    """Transpose a record or scalar into an array."""
    # if node is scalar, convert it to an array
    if node.implementation == DataNode.Implementation.scalar:
        return cls._expand_value_to_array(node)
    if len(node.children) == 1 and len(node.children[0].children) == 1 and \
            cls._is_dup(node, input_type):
        # automatic conversion to an array with a single element applies
        return cls._expand_value_to_array(node)
    if in_transposition:
        if not cls._shift_array(node, input_type):
            # only automatic conversion to an array with a single element is permitted
            notification = Notification.from_name(
                'UnsupportedTranspositionInTransposition', input_type['base_type'])
            notification.span = node.span
            notification_handler.report(notification)
        return node
    cls.init()
    # verify that the subtype is a record
    subtype = input_type['subtype']
    if subtype['base_type'] != 'Record' and \
            (subtype['base_type'] != 'Abstract' or
             not hasattr(node, 'type') or
             node.type.value not in input_type['subtype']['implementations']):
        notification = Notification.from_name('UnsupportedTransposition',
                                              input_type['base_type'])
        notification.span = node.span
        notification_handler.report(notification)
        return node
    assert node.implementation == DataNode.Implementation.mapping, \
        "Can not perform transposition on array"
    # get the array size
    try:
        cls._get_transformation_array_size(node, subtype)
    except Notification as notification:
        notification_handler.report(notification)
        return node
    if cls.array_size is None:
        cls.array_size = 1
    cls._prepare_transformation(node, subtype)
    # create the array
    array_node = SequenceDataNode(node.key, node.parent)
    array_node.span = node.span
    array_node.input_type = node.input_type
    array_node.origin = DataNode.Origin.ac_array
    template_node = deepcopy(node)
    template_node.parent = array_node
    template_node.input_type = subtype
    template_node.origin = DataNode.Origin.ac_transposition
    # create and transpose the items of the array
    for i in range(cls.array_size):
        child_node = deepcopy(template_node)
        child_node.key = TextValue(str(i))
        # convert array to value
        for path in cls.paths_to_convert:
            node_to_convert = child_node.get_node_at_path(path)
            if i >= len(node_to_convert.children) and \
                    len(node_to_convert.children) != 1:
                converted_node = ScalarDataNode(node_to_convert.key,
                                                node_to_convert.parent,
                                                node_to_convert.value)
                converted_node.span = Span(node_to_convert.span.start,
                                           node_to_convert.span.end)
            else:
                if path in cls.paths_to_dup:
                    converted_node = node_to_convert
                elif len(node_to_convert.children) == 1:
                    converted_node = node_to_convert.children[0]
                else:
                    converted_node = node_to_convert.children[i]
                converted_node.parent = node_to_convert.parent
                converted_node.key = node_to_convert.key
                if converted_node.type is None or 'keys' not in subtype or \
                        converted_node.key.value not in subtype['keys']:
                    converted_node.type = node_to_convert.type
            node_to_convert.parent.set_child(converted_node)
        array_node.children.append(child_node)
    return array_node
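# Conceptual sketch of the transformation make_transposition performs: plain
# dicts and lists stand in for the DataNode tree (an illustration of the idea,
# not the real node API). A record whose keys hold equal-length arrays becomes
# an array of records, one record per index; scalar keys are copied into every
# item.
def example_transposition_shape():
    before = {'a': [1, 2], 'b': [3, 4], 'c': 'x'}
    size = 2  # corresponds to cls.array_size above
    after = [
        {key: (value[i] if isinstance(value, list) else value)
         for key, value in before.items()}
        for i in range(size)
    ]
    assert after == [{'a': 1, 'b': 3, 'c': 'x'},
                     {'a': 2, 'b': 4, 'c': 'x'}]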