def test_non_declared_attrs_fail(self):
    """Validation must reject attributes that were never declared."""
    data = Node({
        Node("required"): Node("value"),
        Node("non_declared_attr"): Node(3),
    })
    with pytest.raises(ParseError, match="non_declared_attr"):
        DummyEntity.validate(data)
def test_seq_dump(self):
    """dump() renders a list node with one indented line per item."""
    node = Node([Node(1), Node("a")])
    expected = "\n".join([
        "list(:0:0)[",
        " int(:0:0)(1),",
        " str(:0:0)(a)",
        "]",
    ])
    assert node.dump() == expected
def test_dict_is_ok(self):
    """Map parser accepts a dict node and wraps each value in Base."""
    parsed = Map(Base).parse(Node({
        Node("a"): Node("b"),
    }))
    assert len(parsed.data) == 1
    assert parsed.data["a"].data == "b"
def normalize(cls, yaml_node):
    """Normalize repository shorthand.

    A bare string is expanded into ``{"url": <string>}``; a dict passes
    through untouched. Anything else aborts parsing with an error at
    the node's location.
    """
    value = yaml_node.value
    if not isinstance(value, (str, dict)):
        cls.abort("Invalid repository data. Expected string or dict.", yaml_node.loc)
    if isinstance(value, str):
        # Shorthand form: the whole node is the repository URL.
        return Node({Node("url"): yaml_node})
    return yaml_node
def normalize(cls, yaml_node):
    """Normalize artifact shorthand.

    A bare string is treated as the artifact file path and expanded
    into a full map with the default ``tosca.artifacts.File`` type;
    dicts pass through. Other node types abort parsing.
    """
    value = yaml_node.value
    if not isinstance(value, (str, dict)):
        cls.abort("Expected string or map.", yaml_node.loc)
    if isinstance(value, str):
        # Shorthand: the string is the file; default the artifact type.
        return Node({
            Node("type"): Node("tosca.artifacts.File"),
            Node("file"): yaml_node,
        })
    return yaml_node
def test_build_attrs(self):
    """build() instantiates the entity and wraps each field in its attr type."""
    entity = DummyEntity.build(Node({
        Node("required"): Node(3),
        Node("optional"): Node(2),
    }))
    assert isinstance(entity, DummyEntity)
    for field in ("required", "optional"):
        assert isinstance(entity.data[field], DummyAttr)
def normalize(cls, yaml_node):
    """Normalize import shorthand.

    A bare string is expanded into ``{"file": <string>}``; a dict
    passes through untouched. Anything else aborts parsing.
    """
    value = yaml_node.value
    if not isinstance(value, (str, dict)):
        cls.abort(
            "Invalid import data. Expected string or dict.",
            yaml_node.loc,
        )
    if isinstance(value, str):
        # Shorthand form: the whole node names the imported file.
        return Node({Node("file"): yaml_node})
    return yaml_node
def test_seq_dump_padded(self):
    """dump(pad) prefixes nested lines with the supplied padding."""
    node = Node(
        [
            Node(1, Location("s", 1, 1)),
            Node("a", Location("k", 2, 3)),
        ],
        Location("u", 5, 6),
    )
    expected = "\n".join([
        "list(u:5:6)[",
        " int(s:1:1)(1),",
        " str(k:2:3)(a)",
        " ]",
    ])
    assert node.dump(" ") == expected
def normalize(cls, yaml_node):
    """Ensure a type-definition map has a derived_from entry.

    Non-dict nodes are returned untouched so the validator can reject
    them with a proper error message.
    """
    if not isinstance(yaml_node.value, dict):
        return yaml_node
    # Nothing to do when the author already declared a parent type.
    if any(key.value == "derived_from" for key in yaml_node.value):
        return yaml_node
    # NOTE(review): the default is the *string* "None", not the Python
    # None singleton — presumably matched downstream; confirm.
    merged = {Node("derived_from"): Node("None")}
    merged.update(yaml_node.value)
    return Node(merged, yaml_node.loc)
def test_constructor(self):
    """Constructor stores the value and the full location triple."""
    node = Node("value", Location("name", 1, 2))
    assert node.value == "value"
    loc = node.loc
    assert (loc.stream_name, loc.line, loc.column) == ("name", 1, 2)
def test_build_creates_new_instance(self, data):
    """build() copies both payload and location onto the new object."""
    built = Base.build(Node(data, Location("stream", 1, 2)))
    assert built.data == data
    loc = built.loc
    assert (loc.stream_name, loc.line, loc.column) == ("stream", 1, 2)
def test_string_normalization(self):
    """A bare string is expanded into a full artifact definition map."""
    normalized = ArtifactDefinition.normalize(Node("string"))
    expected = {
        "type": "tosca.artifacts.File",
        "file": "string",
    }
    assert normalized.bare == expected
def test_empty_loc_constructor(self):
    """Omitting the location defaults it to an empty stream at 0:0."""
    node = Node("value")
    assert node.value == "value"
    loc = node.loc
    assert (loc.stream_name, loc.line, loc.column) == ("", 0, 0)
def test_dict_is_ok(self):
    """OrderedMap parses a list of single-key maps, preserving order."""
    parsed = OrderedMap(Base).parse(Node([
        Node({Node("a"): Node("b")}),
        Node({Node("c"): Node("d")}),
    ]))
    pairs = [(key, value.data) for key, value in parsed.data.items()]
    assert pairs == [("a", "b"), ("c", "d")]
def test_dump_nested(self):
    """dump() recurses into containers, indenting each nesting level."""
    node = Node({Node(True): Node([Node(1), Node("a")])})
    expected = "\n".join([
        "dict(:0:0){",
        " bool(:0:0)(True): list(:0:0)[",
        " int(:0:0)(1),",
        " str(:0:0)(a)",
        " ]",
        "}",
    ])
    assert node.dump() == expected
def normalize(cls, yaml_node):
    """Drop dsl_definitions from the top-level TOSCA document.

    dsl_definitions is a preprocessor construct (shared YAML anchors
    live there), so it must not reach the schema validator. Non-map
    documents abort parsing immediately.
    """
    if not isinstance(yaml_node.value, dict):
        cls.abort("TOSCA document should be a map.", yaml_node.loc)
    filtered = {
        key: value
        for key, value in yaml_node.value.items()
        if key.value != "dsl_definitions"
    }
    return Node(filtered, yaml_node.loc)
def parse(self, yaml_node):
    """Parse a YAML list of single-key maps into an ordered map.

    Raises:
        ParseError: when the node is not a list, or when any item is
            not a map with exactly one key.
    """
    if not isinstance(yaml_node.value, list):
        raise ParseError("Expected list of single-key maps.", yaml_node.loc)
    data = collections.OrderedDict()
    for item in yaml_node.value:
        if not isinstance(item.value, dict) or len(item.value) != 1:
            raise ParseError("Expected single-key map.", item.loc)
        key, value = next(iter(item.value.items()))
        data[key] = value
    return super().parse(Node(data, yaml_node.loc))
def test_string_normalization(self):
    """A bare string becomes the primary implementation artifact."""
    normalized = OperationImplementationDefinition.normalize(Node("string"))
    assert normalized.bare == {"primary": "string"}
def test_invalid_timestamp(self, timestamp):
    """Malformed timestamps are rejected with a descriptive error."""
    node = Node(timestamp)
    with pytest.raises(ParseError, match="timestamp"):
        Timestamp.validate(node)
def test_valid_timestamp(self, timestamp):
    """Well-formed timestamps pass validation without raising."""
    node = Node(timestamp)
    Timestamp.validate(node)
def test_parse(self, path):
    """Reference records the section path it was constructed from."""
    reference = Reference(*path)
    parsed = reference.parse(Node("name"))
    assert parsed.section_path == path
def test_dict_normalization(self):
    """Dict nodes are already canonical and pass through unchanged."""
    original = Node({})
    assert OperationImplementationDefinition.normalize(original) == original
def test_str(self):
    """str() renders the node as Node(stream:line:column)[value]."""
    node = Node(0, Location("a", 1, 2))
    assert str(node) == "Node(a:1:2)[0]"
def test_base_nested(self):
    """bare recursively strips Node wrappers from nested structures."""
    wrapped = Node({
        Node(1): Node([Node(1), Node("a")]),
    })
    assert wrapped.bare == {1: [1, "a"]}
def test_bare_map(self):
    """bare unwraps both keys and values of a dict node."""
    node = Node({Node(1): Node("a")})
    assert node.bare == {1: "a"}
def test_bare_seq(self):
    """bare unwraps each element of a list node."""
    node = Node([Node(1), Node("a")])
    assert node.bare == [1, "a"]
def test_bare_scalar(self):
    """bare on a scalar node is the scalar itself."""
    node = Node(1)
    assert node.bare == 1
def test_noop_for_dicts(self):
    """Dict import nodes need no normalization and come back unchanged."""
    original = Node({})
    assert ImportDefinition.normalize(original) == original
def test_invalid_data(self, data):
    """Anything that is neither string nor dict fails normalization."""
    node = Node(data)
    with pytest.raises(ParseError):
        OperationImplementationDefinition.normalize(node)
def test_failed_normalization(self, data):
    """Import normalization rejects nodes that are not strings or dicts."""
    node = Node(data)
    with pytest.raises(ParseError, match="string or dict"):
        ImportDefinition.normalize(node)