def _split_children(cls, lines, node, func, is_flow=True):
    """Find and write separators (borders) positions between nodes.

    For each child of `node`, sets `is_flow` and a `delimiters` span that
    stretches from the end of the previous sibling (or the node key, for
    the first child) to the start of the next sibling (or the node end,
    for the last child in flow style). `func` locates the separator
    position between two given positions in `lines`.
    """
    children = node.children
    if not children:
        return
    analyzer = NodeAnalyzer(lines, node)
    # first child: delimited from the node key up to the next boundary
    start_pos = func(lines, analyzer.get_node_key_end(), children[0].start)
    if len(children) == 1:
        end_pos = func(lines, children[0].end, node.end) if is_flow \
            else children[0].end
    else:
        end_pos = func(lines, children[0].end, children[1].start)
    children[0].is_flow = is_flow
    children[0].delimiters = Span(start_pos, end_pos)
    # middle children: each span starts where the previous one ended
    for index in range(1, len(children) - 1):
        start_pos = end_pos
        end_pos = func(lines, children[index].end, children[index + 1].start)
        children[index].is_flow = is_flow
        children[index].delimiters = Span(start_pos, end_pos)
    # last child (only when there is more than one child)
    if len(children) > 1:
        start_pos = end_pos
        last = children[-1]
        end_pos = func(lines, last.end, node.end) if is_flow else last.end
        last.is_flow = is_flow
        last.delimiters = Span(start_pos, end_pos)
def _create_record_node(self):
    """Creates a record node.

    Consumes parser events from the stream (via `self._next_parse_event`)
    until the matching `MappingEndEvent`, building key/value children.
    Returns the populated `MappingDataNode` with its span set.
    """
    node = MappingDataNode()
    start_mark = self._event.start_mark
    end_mark = self._event.end_mark
    self._next_parse_event()
    # create children
    while (self._event is not None and
           not isinstance(self._event, pyyaml.MappingEndEvent)):
        key = self._create_record_key()
        self._next_parse_event()  # value event
        if not key:  # if key is invalid
            continue
        if self._event is None:
            break  # something went wrong, abandon ship!
        if key.value == '<<':
            # handle merge key (YAML `<<`): merge the referenced mapping
            # into this node instead of adding a regular child
            self._perform_merge(key, node)
            self._next_parse_event()
            continue
        child_node = self._create_node(node)
        self._next_parse_event()
        if child_node is None:  # i.e. unresolved alias
            continue
        child_node.key = key
        node.set_child(child_node, False)
    if self._event is not None:
        # update end_mark when map ends correctly
        end_mark = self._event.end_mark
    elif node.children:
        # parsing ended prematurely: fall back to the last child's span end;
        # NOTE(review): converts the 1-based Position to 0-based mark
        # coordinates in place — this mutates the child's span end as a
        # side effect; presumably acceptable here — verify against callers
        end_mark = node.children[-1].span.end
        end_mark.line -= 1
        end_mark.column -= 1
    node.span = Span.from_marks(start_mark, end_mark)
    return node
def _extract_property_span(self, start_mark, symbol):
    """
    Create a `Span` from the first `symbol` at `start_mark`.

    Used to get the span of node properties like anchors or tags.
    Searches the line at `start_mark` from `start_mark.column` onward;
    if no match is found there, scans subsequent document lines.

    :param start_mark: pyyaml mark (0-based line/column) to start from
    :param symbol: single marker character (e.g. '&' or '*')
    :return: `Span` of the property name (without the symbol), 1-based
    :raises AttributeError: if the symbol is not found anywhere in the
        remaining document (matches the original behavior of calling
        ``.start(1)`` on a ``None`` match)
    """
    lines = self._document.splitlines()
    line_index = start_mark.line
    # first line is searched only from start_mark.column onward
    first_line = lines[line_index][start_mark.column:]
    regex = re.compile('[{symbol}]([a-zA-Z0-9_:-]+)'.format(symbol=symbol))
    match = regex.search(first_line)
    if match is not None:
        # offset the match back by the slice we removed from the first line
        start_column = start_mark.column + match.start(1)
        end_column = start_mark.column + match.end(1)
    else:
        # fall through to the following lines (searched in full)
        while match is None and line_index < len(lines) - 1:
            line_index += 1
            match = regex.search(lines[line_index])
        start_column = match.start(1)
        end_column = match.end(1)
    # convert 0-based line/column to 1-based document positions
    start = Position(line_index + 1, start_column + 1)
    end = Position(line_index + 1, end_column + 1)
    return Span(start, end)
def _create_fatal_error_node(self, start_pos):
    """
    Creates a non-existing node in the data tree to wrap the content
    of the error (span) in a node.

    The node spans from `start_pos` to the end of the document and is
    stored in `self._fatal_error_node`.
    """
    error_node = ScalarDataNode()
    document_end = Position.from_document_end(self._document)
    error_node.span = Span(start_pos, document_end)
    error_node.key = TextValue('fatal_error')
    error_node.origin = DataNode.Origin.error
    error_node.hidden = True
    self._fatal_error_node = error_node
def _create_record_key(self):
    """Creates `TextValue` of record key.

    Returns None (and reports a `ComplexRecordKey` notification, stopping
    further event iteration) when the key event is not a plain scalar.
    """
    if isinstance(self._event, pyyaml.ScalarEvent):
        # the common case: a plain scalar key
        key = TextValue()
        key.value = self._event.value
        key.span = Span.from_event(self._event)
        return key
    # complex (non-scalar) keys are not supported -> fatal error
    start_pos = Position.from_mark(self._event.start_mark)
    self._create_fatal_error_node(start_pos)
    notification = Notification.from_name('ComplexRecordKey')
    notification.span = self._fatal_error_node.span
    self.notification_handler.report(notification)
    self._event = None
    self._iterate_events = False
    return None
def _create_scalar_node(self):
    """Creates a ScalarDataNode.

    Resolves the scalar's YAML tag (falling back to implicit resolution
    when no core-schema tag is present), constructs the value, and
    reports a `ConstructScalarError` notification on failure.
    """
    node = ScalarDataNode()
    node.span = Span.from_event(self._event)
    raw_tag = self._event.tag
    if raw_tag is None or not raw_tag.startswith('tag:yaml.org,2002:'):
        # no explicit core-schema tag -> resolve implicitly from the value
        raw_tag = resolve_scalar_tag(self._event.value)
    try:
        node.value = construct_scalar(self._event.value, raw_tag)
    except Exception as error:
        notification = Notification.from_name('ConstructScalarError',
                                              error.args[0])
        notification.span = node.span
        self.notification_handler.report(notification)
        return node
    if node.value is None:
        # alter position of empty node (so it can be selected)
        node.span.end.column += 1
    return node
def _validate_abstract(self, node, input_type):
    """Validates an AbtractRecord node.

    Determines the concrete Record type implementing the Abstract type
    and delegates to `_validate_record`; reports a notification and
    returns early when the concrete type cannot be resolved.
    """
    try:
        concrete_type = checks.get_abstractrecord_type(node, input_type)
    except Notification as notification:
        # attach the most relevant span before reporting
        if notification.name == 'InvalidAbstractType':
            notification.span = node.type.span
        else:
            notification.span = get_node_key(node).notification_span
        self._report_notification(notification)
        return
    if node.type is None:
        # if default_descendant defines the Abstract type, add it to data structure
        node.type = TextValue()
        node.type.value = concrete_type.get('name')
        node.type.span = Span(node.span.start, node.span.start)
    concrete_type['implemented_abstract_record'] = input_type
    node.input_type = concrete_type
    self._validate_record(node, concrete_type)
def _create_array_node(self):
    """Creates an array node.

    Consumes parser events until the matching `SequenceEndEvent`,
    appending each constructed child with its positional key.
    """
    array = SequenceDataNode()
    begin_mark = self._event.start_mark
    finish_mark = self._event.end_mark
    self._next_parse_event()
    while not (self._event is None or
               isinstance(self._event, pyyaml.SequenceEndEvent)):
        # key is the child's position within the sequence
        index_key = TextValue(str(len(array.children)))
        child = self._create_node(array)
        self._next_parse_event()
        if child is None:  # i.e. unresolved alias
            continue
        child.key = index_key
        array.children.append(child)
    if self._event is not None:
        # update end mark when the array ends correctly
        finish_mark = self._event.end_mark
    elif array.children:
        # premature end of parsing: derive the mark from the last child
        # (converting its 1-based Position to 0-based mark coordinates)
        finish_mark = array.children[-1].span.end
        finish_mark.line -= 1
        finish_mark.column -= 1
    array.span = Span.from_marks(begin_mark, finish_mark)
    return array
def make_transposition(cls, node, input_type, in_transposition):
    """Transpose a record or scalar into an array.

    :param node: the data node to transpose
    :param input_type: expected Array input type (with a 'subtype' key)
    :param in_transposition: True when already inside a transposition,
        in which case only the single-element expansion is permitted
    :return: the resulting SequenceDataNode, or the original `node`
        when the transposition is not possible (a notification is
        reported in that case)
    """
    # if node is scalar, convert it to array
    if node.implementation == DataNode.Implementation.scalar:
        return cls._expand_value_to_array(node)
    if len(node.children) == 1 and len(node.children[0].children) == 1 and \
            cls._is_dup(node, input_type):
        # automatic conversion to array with one element is fitted
        return cls._expand_value_to_array(node)
    if in_transposition:
        if not cls._shift_array(node, input_type):
            # only automatic conversion to array with one element is permited
            notification = Notification.from_name(
                'UnsupportedTranspositionInTransposition',
                input_type['base_type'])
            notification.span = node.span
            notification_handler.report(notification)
            return node
    cls.init()
    # verify that subtype is record (or a known Abstract implementation)
    subtype = input_type['subtype']
    if subtype['base_type'] != 'Record' and \
            (subtype['base_type'] != 'Abstract' or
             not hasattr(node, 'type') or
             node.type.value not in input_type['subtype']['implementations']):
        notification = Notification.from_name('UnsupportedTransposition',
                                              input_type['base_type'])
        notification.span = node.span
        notification_handler.report(notification)
        return node
    assert node.implementation == DataNode.Implementation.mapping, \
        "Can not perform transposition on array"
    # get array size
    try:
        cls._get_transformation_array_size(node, subtype)
    except Notification as notification:
        notification_handler.report(notification)
        return node
    if cls.array_size is None:
        cls.array_size = 1
    cls._prepare_transformation(node, subtype)
    # create array
    array_node = SequenceDataNode(node.key, node.parent)
    array_node.span = node.span
    array_node.input_type = node.input_type
    array_node.origin = DataNode.Origin.ac_array
    template_node = deepcopy(node)
    template_node.parent = array_node
    template_node.input_type = subtype
    template_node.origin = DataNode.Origin.ac_transposition
    # create and transpose items of the array
    for i in range(cls.array_size):
        child_node = deepcopy(template_node)
        child_node.key = TextValue(str(i))
        # convert array to value
        for path in cls.paths_to_convert:
            node_to_convert = child_node.get_node_at_path(path)
            if i >= len(node_to_convert.children) and \
                    len(node_to_convert.children) != 1:
                # no i-th element: wrap the node's own value as a scalar
                converted_node = ScalarDataNode(node_to_convert.key,
                                                node_to_convert.parent,
                                                node_to_convert.value)
                converted_node.span = Span(node_to_convert.span.start,
                                           node_to_convert.span.end)
            else:
                if path in cls.paths_to_dup:
                    # duplicated path: keep the node as-is
                    converted_node = node_to_convert
                elif len(node_to_convert.children) == 1:
                    # single element is reused for every transposed item
                    converted_node = node_to_convert.children[0]
                else:
                    # NOTE: a second, unreachable `elif path in
                    # cls.paths_to_dup` branch was removed here — it
                    # duplicated the first condition of this chain and
                    # could never be taken
                    converted_node = node_to_convert.children[i]
                converted_node.parent = node_to_convert.parent
                converted_node.key = node_to_convert.key
                if converted_node.type is None or 'keys' not in subtype or \
                        converted_node.key.value not in subtype['keys']:
                    converted_node.type = node_to_convert.type
            node_to_convert.parent.set_child(converted_node)
        array_node.children.append(child_node)
    return array_node