Example #1
def test_tokenize_list():
    token = tokenize_yaml(YAML_LIST)
    expected = ListToken(
        [
            ScalarToken(True, 3, 6),
            ScalarToken(False, 10, 14),
            ScalarToken(None, 18, 21),
        ],
        1,
        22,
    )
    assert token == expected
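The YAML_LIST fixture referenced above is defined outside this excerpt. A fixture consistent with the asserted character offsets would look roughly like the sketch below; it is an assumption, not necessarily the project's exact constant.
# Assumed fixture: matches the offsets asserted above ("true" at 3-6, "false"
# at 10-14, "null" at 18-21, list spanning 1-22); not taken from the source file.
YAML_LIST = """
- true
- false
- null
"""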
Example #2
def test_tokenize_list():
    token = tokenize_json("[true, false, null]")
    expected = ListToken(
        [ScalarToken(True, 1, 4), ScalarToken(False, 7, 11), ScalarToken(None, 14, 17)],
        0,
        18,
    )
    assert token == expected
    assert token.value == [True, False, None]
    assert token.lookup([0]).value is True
    assert token.lookup([0]).string == "true"
    assert token.lookup([0]).start.char_index == 1
    assert token.lookup([0]).end.char_index == 4
Example #3
def test_tokenize_floats():
    token = tokenize_json("[100.0, 1.0E+2, 1E+2]")
    expected = ListToken(
        [
            ScalarToken(100.0, 1, 5),
            ScalarToken(100.0, 8, 13),
            ScalarToken(100.0, 16, 19),
        ],
        0,
        20,
    )
    assert token == expected
    assert token.value == [100.0, 1.0e2, 1e2]
    assert token.lookup([0]).value == 100.0
    assert token.lookup([0]).string == "100.0"
    assert token.lookup([0]).start.char_index == 1
    assert token.lookup([0]).end.char_index == 5
Example #4
def test_tokenize_whitespace():
    token = tokenize_json("{ }")
    expected = DictToken({}, 0, 2)
    assert token == expected
    assert token.value == {}
    assert token.string == "{ }"

    token = tokenize_json('{ "a" :  1 }')
    expected = DictToken({ScalarToken("a", 2, 4): ScalarToken(1, 9, 9)}, 0, 11)
    assert token == expected
    assert token.value == {"a": 1}
    assert token.lookup(["a"]).value == 1
    assert token.lookup(["a"]).string == "1"
    assert token.lookup(["a"]).start.char_index == 9
    assert token.lookup(["a"]).end.char_index == 9
    assert token.lookup_key(["a"]).value == "a"
    assert token.lookup_key(["a"]).string == '"a"'
    assert token.lookup_key(["a"]).start.char_index == 2
    assert token.lookup_key(["a"]).end.char_index == 4
Example #5
def test_tokenize_object():
    token = tokenize_json('{"a": [1, 2, 3], "b": "test"}')
    expected = DictToken(
        {
            ScalarToken("a", 1, 3):
            ListToken(
                [
                    ScalarToken(1, 7, 7),
                    ScalarToken(2, 10, 10),
                    ScalarToken(3, 13, 13)
                ],
                6,
                14,
            ),
            ScalarToken("b", 17, 19):
            ScalarToken("test", 22, 27),
        },
        0,
        28,
    )
    assert repr(token) == 'DictToken(\'{"a": [1, 2, 3], "b": "test"}\')'
    assert token == expected
    assert token.value == {"a": [1, 2, 3], "b": "test"}
    assert token.lookup(["a"]).value == [1, 2, 3]
    assert token.lookup(["a"]).string == "[1, 2, 3]"
    assert token.lookup(["a"]).start.line_no == 1
    assert token.lookup(["a"]).start.column_no == 7
    assert token.lookup_key(["a"]).value == "a"
    assert token.lookup_key(["a"]).string == '"a"'
    assert token.lookup_key(["a"]).start.char_index == 1
    assert token.lookup_key(["a"]).end.char_index == 3
Example #6
    def _scan_once(string: str, idx: int) -> typing.Tuple[Token, int]:
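        # Dispatch on the next character: strings, objects, arrays, JSON
        # literals, and numbers each yield a Token recording inclusive
        # start/end character offsets into `content`, along with the index
        # at which scanning should resume.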
        try:
            nextchar = string[idx]
        except IndexError:
            raise StopIteration(idx) from None

        if nextchar == '"':
            value, end = parse_string(string, idx + 1, strict)
            return ScalarToken(value, idx, end - 1, content), end
        elif nextchar == "{":
            value, end = parse_object((string, idx + 1), strict, _scan_once,
                                      memo, content)
            return DictToken(value, idx, end - 1, content), end
        elif nextchar == "[":
            value, end = parse_array((string, idx + 1), _scan_once)
            return ListToken(value, idx, end - 1, content), end
        elif nextchar == "n" and string[idx:idx + 4] == "null":
            value, end = None, idx + 4
            return ScalarToken(value, idx, end - 1, content), end
        elif nextchar == "t" and string[idx:idx + 4] == "true":
            value, end = True, idx + 4
            return ScalarToken(value, idx, end - 1, content), end
        elif nextchar == "f" and string[idx:idx + 5] == "false":
            value, end = False, idx + 5
            return ScalarToken(value, idx, end - 1, content), end

        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or "") + (exp or ""))
            else:
                res = parse_int(integer)
            value, end = res, m.end()
            return ScalarToken(value, idx, end - 1, content), end
        else:  # pragma: no cover
            raise StopIteration(idx)
Example #7
def test_tokenize_object():
    token = tokenize_yaml(YAML_OBJECT)
    expected = DictToken(
        {
            ScalarToken("a", 1, 1):
            ListToken(
                [
                    ScalarToken(1, 8, 8),
                    ScalarToken(2, 14, 14),
                    ScalarToken(3, 20, 20)
                ],
                6,
                21,
            ),
            ScalarToken("b", 22, 22):
            ScalarToken("test", 25, 30),
        },
        1,
        31,
    )
    assert token == expected
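As with the list test, the YAML_OBJECT fixture is defined elsewhere. A document consistent with the asserted offsets would be roughly the following reconstruction; the quoting and spacing are assumptions.
# Assumed fixture: reproduces the offsets above (key "a" at 1, list items at
# 8/14/20, key "b" at 22, a quoted "test" scalar spanning 25-30); the real
# constant may differ in quoting style.
YAML_OBJECT = """
a:
  - 1
  - 2
  - 3
b: "test"
"""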
Example #8
def test_tokenize_floats():
    token = tokenize_yaml(YAML_FLOATS)
    expected = ListToken(
        [ScalarToken(100.0, 3, 7),
         ScalarToken(100.0, 11, 16)], 1, 17)
    assert token == expected
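YAML_FLOATS is likewise defined outside this excerpt. The offsets imply a five-character literal at 3-7 and a six-character literal at 11-16, both parsing to 100.0; the sketch below is one such fixture, with the scientific-notation spelling being a guess.
# Assumed fixture: "100.0" at offsets 3-7 and a six-character float such as
# "1.0E+2" at 11-16; the exact spelling of the second entry is an assumption.
YAML_FLOATS = """
- 100.0
- 1.0E+2
"""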
Example #9
def construct_null(loader: "yaml.Loader", node: "yaml.Node") -> ScalarToken:
    start = node.start_mark.index
    end = node.end_mark.index
    value = loader.construct_yaml_null(node)
    return ScalarToken(value, start, end - 1, content=str_content)
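construct_null closes over a str_content variable from an enclosing scope and is presumably registered on a PyYAML loader. A minimal registration sketch, assuming a hypothetical loader subclass and that this code runs where construct_null and str_content are in scope:
import yaml

# Hypothetical loader subclass; the project's actual class name and wiring may differ.
class _TokenizingSafeLoader(yaml.SafeLoader):
    pass

# Override the default null handling so parsed nulls carry character offsets.
_TokenizingSafeLoader.add_constructor("tag:yaml.org,2002:null", construct_null)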
Example #10
def _TokenizingJSONObject(
    s_and_end: typing.Tuple[str, int],
    strict: bool,
    scan_once: typing.Callable[[str, int], typing.Tuple[Token, int]],
    memo: dict,
    content: str,
    _w: typing.Callable = WHITESPACE.match,
    _ws: str = WHITESPACE_STR,
) -> typing.Tuple[dict, int]:
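    # A position-tracking variant of the standard library's JSON object parser:
    # keys and values are wrapped in Token instances recording their character
    # offsets in `content` rather than returned as plain Python objects.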
    s, end = s_and_end
    pairs = []  # type: typing.List[typing.Tuple[Token, Token]]
    pairs_append = pairs.append
    memo_get = memo.setdefault
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == "}":
            return {}, end + 1
        elif nextchar != '"':
            raise JSONDecodeError(
                "Expecting property name enclosed in double quotes", s, end)
    end += 1
    while True:
        start = end - 1
        key, end = scanstring(s, end, strict)
        key = ScalarToken(memo_get(key, key), start, end - 1, content)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ":":
            end = _w(s, end).end()
            if s[end:end + 1] != ":":
                raise JSONDecodeError("Expecting ':' delimiter", s, end)
        end += 1

        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

        try:
            value, end = scan_once(s, end)
        except StopIteration as err:
            raise JSONDecodeError("Expecting value", s, err.value) from None
        pairs_append((key, value))
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ""
        end += 1

        if nextchar == "}":
            break
        elif nextchar != ",":
            raise JSONDecodeError("Expecting ',' delimiter", s, end - 1)
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar != '"':
            raise JSONDecodeError(
                "Expecting property name enclosed in double quotes", s,
                end - 1)
    return dict(pairs), end