Code example #1
def test_tokenize_whitespace():
    token = tokenize_json('{ }')
    expected = DictToken({}, 0, 2)
    assert token == expected
    assert token.get_value() == {}

    token = tokenize_json('{ "a" :  1 }')
    expected = DictToken({
        ScalarToken('a', 2, 4): ScalarToken(1, 9, 9)
    }, 0, 11)
    assert token == expected
    assert token.get_value() == {"a": 1}
    assert token["a"].get_value() == 1
    assert token["a"].start_index == 9
    assert token["a"].end_index == 9
    assert token.get_key("a").get_value() == "a"
    assert token.get_key("a").start_index == 2
    assert token.get_key("a").end_index == 4
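
For reference, the start/end offsets above are inclusive character positions into the source text. A quick sanity check (plain Python, not part of the test suite):

source = '{ "a" :  1 }'
assert source[2:4 + 1] == '"a"'  # the key token spans indices 2-4, quotes included
assert source[9] == '1'          # the value token sits at index 9
assert source[0] == '{' and source[11] == '}'  # the dict token spans 0-11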
Code example #2
def test_tokenize_object():
    token = tokenize_yaml(YAML_OBJECT)
    expected = DictToken({
        ScalarToken('a', 1, 1): ListToken([
            ScalarToken(1, 8, 8),
            ScalarToken(2, 14, 14),
            ScalarToken(3, 20, 20)
        ], 6, 21),
        ScalarToken('b', 22, 22): ScalarToken('test', 25, 30)
    }, 1, 31)
    assert token == expected
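
The YAML_OBJECT fixture referenced in this test is not shown in the excerpt. Working backwards from the token offsets, it is presumably a module-level string along these lines (a reconstruction, not the original source):

YAML_OBJECT = """
a:
  - 1
  - 2
  - 3
b: "test"
"""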
Code example #3
def test_tokenize_object():
    token = tokenize_json('{"a": [1, 2, 3], "b": "test"}')
    expected = DictToken({
        ScalarToken('a', 1, 3): ListToken([
            ScalarToken(1, 7, 7),
            ScalarToken(2, 10, 10),
            ScalarToken(3, 13, 13)
        ], 6, 14),
        ScalarToken('b', 17, 19): ScalarToken('test', 22, 27)
    }, 0, 28)
    assert token == expected
    assert token.get_value() == {"a": [1, 2, 3], "b": "test"}
    assert token["a"].get_value() == [1, 2, 3]
    assert token["a"].start_index == 6
    assert token["a"].end_index == 14
    assert token["a"].start.line_no == 1
    assert token["a"].start.column_no == 7
    assert token.get_key("a").get_value() == "a"
    assert token.get_key("a").start_index == 1
    assert token.get_key("a").end_index == 3
Code example #4
    def _scan_once(string, idx):
        # Each branch returns (token, end): a token whose end offset is the
        # inclusive index of the value's last character, plus the index where
        # scanning resumes.
        try:
            nextchar = string[idx]
        except IndexError:
            raise StopIteration(idx) from None

        if nextchar == '"':
            value, end = parse_string(string, idx + 1, strict)
            return ScalarToken(value, idx, end - 1, content), end
        elif nextchar == '{':
            value, end = parse_object((string, idx + 1), strict, _scan_once,
                                      memo, content)
            return DictToken(value, idx, end - 1, content), end
        elif nextchar == '[':
            value, end = parse_array((string, idx + 1), _scan_once)
            return ListToken(value, idx, end - 1, content), end
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            value, end = None, idx + 4
            return ScalarToken(value, idx, end - 1, content), end
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            value, end = True, idx + 4
            return ScalarToken(value, idx, end - 1, content), end
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            value, end = False, idx + 5
            return ScalarToken(value, idx, end - 1, content), end

        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            value, end = res, m.end()
            return ScalarToken(value, idx, end - 1, content), end
        else:
            raise StopIteration(idx)
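
The closure above appears to be modeled on CPython's pure-Python JSON scanner (json.scanner.py_make_scanner), with each parsed value wrapped in a token carrying inclusive start/end character offsets. A minimal standalone sketch of that offset convention, using only the standard library (scan_scalar is a hypothetical helper, not the library's code):

import json
import re

NUMBER_RE = re.compile(r'-?\d+(\.\d+)?([eE][-+]?\d+)?')

def scan_scalar(string, idx):
    # Return ((value, start, end_inclusive), next_index), mirroring how
    # _scan_once pairs each ScalarToken with the index where scanning resumes.
    if string[idx] == '"':
        value, end = json.decoder.scanstring(string, idx + 1)
        return (value, idx, end - 1), end
    m = NUMBER_RE.match(string, idx)
    if m is not None:
        text = m.group()
        value = float(text) if set(text) & set('.eE') else int(text)
        return (value, idx, m.end() - 1), m.end()
    raise ValueError(f'unexpected character at index {idx}')

print(scan_scalar('{"a": 123}', 6))  # ((123, 6, 8), 9)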
Code example #5
    def construct_mapping(loader, node):
        # Marks give character offsets into the source document; the end mark
        # is converted to an inclusive end index by subtracting one, matching
        # the convention used by the other tokens.
        start = node.start_mark.index
        end = node.end_mark.index
        mapping = loader.construct_mapping(node)
        return DictToken(mapping, start, end - 1, content=content)
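
The function above is presumably nested inside the YAML tokenizer and closes over content, the raw document text. A minimal sketch of how such a constructor can be registered on a PyYAML loader (the loader class and the plain-dict return shape are assumptions made so the snippet stands alone):

import yaml

def construct_mapping_with_span(loader, node):
    # PyYAML marks carry character offsets into the source document; the end
    # mark points one past the node, so subtract one for an inclusive end.
    mapping = loader.construct_mapping(node)
    return {
        'value': mapping,
        'start_index': node.start_mark.index,
        'end_index': node.end_mark.index - 1,
    }

class SpanLoader(yaml.SafeLoader):
    pass

SpanLoader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
    construct_mapping_with_span,
)

print(yaml.load('a: 1\nb: "test"\n', Loader=SpanLoader))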