def generate_types():
    """
    Generates and writes the C++ code for internal thing types, such as the text and number classes.
    Additionally, writes the symbol maps for the generated types.
    """
    for name, path in definitions.INTERNAL_SOURCES.items():
        symbol_map = SymbolMapper(pipeline.preprocess(SourceContext(path)))[name]
        symbol_map.convention = Symbol.INTERNAL

        # Every internal type gets an implicit no-op cast to text unless it defines one itself
        text_cast = CastTag(Identifier('text'))
        if text_cast not in symbol_map:
            symbol_map.lookup[text_cast] = Symbol.noop(text_cast, Identifier('text'), implicit=True)

        for symbol in symbol_map:
            symbol.convention = Symbol.INTERNAL

        serialized = json.dumps(symbol_map.serialize(), cls=JSONSerializer, indent=4, sort_keys=True)
        write_if_changed(os.path.join(SYMBOLS_TARGET, f'{name}.thingsymbols'), serialized)
def test_parameterization_propagation():
    """ The type of a parametrized member should resolve to a concrete parametrized symbol map """
    symbols = get_symbols(SOURCE_FULL)
    member_type = symbols[Identifier('Person')][Identifier('favorite_numbers')].type
    resolved = symbols[member_type]
    expected = GenericIdentifier(Identifier('Pair'), (Identifier('number'),))
    assert resolved.name == expected
def validate_thing_definition(node, name, extends=None, generics=None):
    """ Assert that a node is a ThingDefinition with the expected name, parent and generic parameters """
    assert isinstance(node, ThingDefinition)
    expected_name = name if isinstance(name, Identifier) else Identifier(name)
    assert node.name == expected_name
    assert node.extends == (Identifier(extends) if extends else None)
    assert node.generics == ([Identifier(generic) for generic in generics] if generics else None)
def test_method_parameterization():
    """ Method signatures on a parametrized type should have their generic parameters substituted """
    pair_number = get_parametrized()
    validate_method(pair_number[Identifier('set_values')], 'number', ['number', 'number'], 1)
    number_list = GenericIdentifier.wrap('list', 'number')
    validate_method(pair_number[Identifier('nested_param')], number_list, [number_list], 2)
def test_iteration_loop_parsing():
    """ A for-in statement should parse into an IterationLoop over the expected identifiers """
    node = parse_local('for number n in numbers')
    assert isinstance(node, IterationLoop)
    expected = Identifier('n'), Identifier('number'), Identifier('numbers')
    assert (node.target, node.target_type, node.collection) == expected
def validate_method(method: Symbol, type, arguments, index, static=False):
    """ Assert that a symbol describes a public method with the expected signature """
    expected_type = Identifier(type) if isinstance(type, str) else type
    assert method.type == expected_type, (method.type, type)
    assert method.index == index
    expected_arguments = [Identifier(arg) if isinstance(arg, str) else arg for arg in arguments]
    assert method.arguments == expected_arguments
    assert method.static is static
    assert method.kind is Symbol.METHOD
    assert method.visibility is Symbol.PUBLIC
def test_person_member_symbol_description():
    """ The Person symbol map should expose its members with the expected types and indices """
    symbols = get_symbols(SOURCE_PERSON)
    expected_symbols = ('name', 'age', 'location', 'walk_to', 'say_hello', 'shout', 'favorite_numbers')
    person = symbol_map_sanity(symbols, 'Person', expected_symbols)
    validate_member(person[Identifier('name')], Identifier('text'), 0)
    validate_member(person[Identifier('location')], Identifier('Location'), 2)
def internal_call(target):
    """ Resolve a dotted target path into an internal call opcode; 'as X' components become cast tags """
    def as_component(part):
        # Components prefixed with 'as ' denote casts; strip the 3-character prefix
        return CastTag(Identifier(part[3:])) if part.startswith('as ') else Identifier(part)

    path = [as_component(part) for part in target.split('.')]
    return OpcodeCallInternal.from_reference(SYMBOL_MAPPER.resolve_named(path, generic_validation=False))
def test_chained_call():
    """ A chained method call should parse with the inner call as the target's head """
    call = parse_local('counter.increment().add(10)')
    validate_types(call.arguments, [NumericValue])
    inner_call = MethodCall(NamedAccess([Identifier('counter'), Identifier('increment')]))
    assert call.target == NamedAccess([inner_call, Identifier('add')])
def validate_assignment(node, type, name, value):
    """ Assert that a node is an AssignmentOperation with the expected target, type and intent """
    assert isinstance(node, AssignmentOperation)
    assert node.name == Identifier(name)
    assert node.name.type == (Identifier(type) if type else None)
    if type:
        assert node.intent == AssignmentOperation.DECELERATION
    else:
        assert node.intent == AssignmentOperation.REASSIGNMENT
    if value in (MethodCall, BinaryOperation):
        assert isinstance(node.value, value)
def test_generic_parsing():
    """ A generic declaration should parse into an assignment whose target carries a GenericIdentifier type """
    node = parse_local('list<number> l = [1, 2, 3]')
    assert isinstance(node, AssignmentOperation)
    target = node.name
    assert isinstance(target, Identifier)
    assert target == Identifier('l')
    assert isinstance(target.type, GenericIdentifier)
    assert target.type.value == Identifier('list')
    assert target.type.generics == (Identifier('number'),)
def load_identifier(value):
    """
    Parse a generic identifier from its serialized form.
    Strings become plain identifiers, lists become generic identifiers,
    and cast-intent dicts become cast tags. Anything else yields None.
    """
    if isinstance(value, str):
        return Identifier(value)
    if isinstance(value, list):
        base, generics = value[0], value[1]
        return GenericIdentifier(Identifier(base), tuple(Identifier(generic) for generic in generics))
    if isinstance(value, dict) and value['intent'] == 'cast':
        return CastTag(Symbol.load_identifier(value['type']))
def test_nested_member_parameterization():
    """ Parametrization should propagate through arbitrarily nested generic member types """
    pair_number = get_parametrized()
    number_list = GenericIdentifier.wrap('list', 'number')
    validate_member(pair_number[Identifier('parts')], number_list, 2)
    triply_nested = GenericIdentifier.wrap('list', GenericIdentifier.wrap('list', number_list))
    validate_member(pair_number[Identifier('nested')], triply_nested, 3)
def compile(self, context: CompilationBuffer):
    """ Compile a list literal into a list constructor call followed by a chain of append calls """
    if not self.values:
        return

    # Compile the first element into a throwaway buffer purely to infer the element type
    # TODO: remove unnecessary recompilation of first element (used to infer type)
    first_ref = self[0].compile(context.optional())
    list_type = GenericIdentifier(Identifier('list'), (first_ref.type,))

    chain = MethodCall(NamedAccess([list_type, Identifier.constructor()])).deriving_from(self)
    # TODO: validate list is homogeneous, and descend to lowest common type
    for element in self:
        chain = MethodCall(NamedAccess([chain, Identifier("append")]), ArgumentList([element])).deriving_from(self)

    return chain.compile(context)
def test_argument_count_mismatch():
    """ Calling a method with the wrong number of arguments should raise NoMatchingOverload """
    for method_name, code in (('no_args', 'self.no_args(1)'), ('two_args', 'self.two_args(1)')):
        with pytest.raises(NoMatchingOverload) as e:
            pipeline.compile(SourceContext.wrap(BASE.format(code=code)))
        assert e.value.methods[0].name == Identifier(method_name)
        assert e.value.arguments == [NumericValue(1)]
def entry(self) -> Reference:
    """ Get the index of the program's entry point """
    entry_point = NamedAccess([Identifier('Program'), Identifier.constructor()])
    return self.resolve(entry_point, {})
def finalize_buffer(buffer: str, terminating_char, entity_class, source_ref) -> LexicalToken:
    """
    Finalize a character buffer into a lexical token
    :param buffer: the characters collected thus far
    :param terminating_char: the character which caused the buffer termination (not included in buffer)
    :param entity_class: the current entity being collected (generally, a type of quote, or none)
    :param source_ref: a reference to the source from which these tokens were derived
    """
    # String and inline-code termination must be resolved BEFORE the keyword/number checks;
    # otherwise quoted content such as "if" or "123" is mis-tokenized as a keyword or a
    # numeric value instead of an InlineString, and an unclosed string whose buffer happens
    # to be a keyword or digits silently tokenizes instead of raising.
    if terminating_char == '"':
        if entity_class is not LexicalQuote:
            raise ValueError("Unexpected end of string")
        return InlineString(buffer, source_ref)

    if terminating_char == '`':
        if entity_class is not LexicalBacktick:
            raise ValueError("Unexpected end of inline code")
        return InlineString(buffer, source_ref)

    if entity_class in (LexicalQuote, LexicalBacktick):
        raise ValueError("String was not closed")

    if buffer in KEYWORDS:
        # Non-emittable keywords (pure syntax markers) produce no token
        return KEYWORDS[buffer](buffer, source_ref) if KEYWORDS[buffer].EMITTABLE else None

    if buffer.isdigit():
        return NumericValue(buffer, source_ref)

    if Identifier.validate(buffer):
        return Identifier(buffer, source_ref)

    if buffer:
        raise ValueError('Lexer: cannot terminate group "{}" (at {})'.format(
            buffer, source_ref))
def validate_member(member: Symbol, type, index, static=False):
    """ Assert that a symbol describes a public member with the expected type and index """
    expected_type = Identifier(type) if isinstance(type, str) else type
    assert member.type == expected_type
    assert member.index == index
    assert member.static is static
    assert member.kind is Symbol.MEMBER
    assert member.visibility is Symbol.PUBLIC
def get_selection(*target_types):
    """ Build a selector over the base element, constrain it by the given target types, and disambiguate """
    selector = BASE.element.selector(CONTEXT)
    for type_name in target_types:
        selector.constraint(Reference(Identifier(type_name)))
    return selector.disambiguate(None)
def from_serialized(cls, code, argument_names, argument_types):
    """ Reconstruct an instance from serialized code and parallel lists of argument names and types """
    ast = preprocess.preprocess(SourceContext.wrap(code))
    arguments = [Identifier(name, type_name=type_name)
                 for name, type_name in zip(argument_names, argument_types)]
    return cls(ast.children, ArgumentList(arguments))
def compile(self, context: CompilationBuffer):  # TODO: we should probably reparse the maps into identifiers
    """ Compile a binary operation as a method call on its left-hand side """
    operator_name = Identifier(self.operator.serialize())
    call_target = NamedAccess.extend(self.lhs, operator_name)
    return MethodCall(call_target, [self.rhs]).compile(context)
def __init__(self, target: Identifier, target_type: Identifier, collection: ValueType):
    """ Describe an iteration loop over a collection, binding each element to a typed target """
    super().__init__(None, (target, target_type, collection))

    self.target, self.target_type, self.collection = target, target_type, collection
    # The transient id must be assigned before iterator_container_name is consulted
    self.iterator_id = next(IterationLoop.TRANSIENT_COUNTER)
    self.iterator = self.iterator_container_name[0]

    def iterator_call(method_name):
        return MethodCall(NamedAccess.extend(self.iterator, Identifier(method_name))).deriving_from(self)

    self.continuation_check = iterator_call('has_next')
    self.continuation_next = iterator_call('next')
    self.value = self.continuation_check

    # A collection produced by a method call must be captured so it can be iterated
    if isinstance(self.collection, MethodCall):
        self.collection.is_captured = True
def normalize_id(param):
    """
    Recursively normalize raw test values: strings become identifiers, ints become
    numeric values, sequences are normalized element-wise; anything else passes through.
    """
    if isinstance(param, (tuple, list)):
        return [normalize_id(item) for item in param]
    if isinstance(param, str):
        return Identifier(param)
    if isinstance(param, int):
        return NumericValue(param)
    return param
def compile(self, context: CompilationBuffer):
    """ Compile the loop, first binding the collection's iterator into a transient local """
    iterator_name, iterator_type = self.iterator_container_name
    iterator_source = MethodCall(NamedAccess.extend(self.collection, Identifier('iterator')),
                                 is_captured=True).deriving_from(self)
    binding = AssignmentOperation(AssignmentOperation.REASSIGNMENT,
                                  iterator_name,
                                  iterator_source,
                                  iterator_type).deriving_from(self)
    binding.compile(context)
    super().compile(context)
def validate_method_definition(node, name, expected_arguments=(), return_type=None):
    """ Assert that a node is a MethodDefinition with the expected name, arguments and return type """
    assert isinstance(node, MethodDefinition)
    expected_name = name if isinstance(name, Identifier) else Identifier(name)
    assert node.name == expected_name
    assert node.return_type == return_type
    for actual, expected in zip(node.arguments, expected_arguments):
        assert actual.value == expected[0]
        assert actual.type.value == expected[1]
class NumericValue(PrimitiveType):
    """ An inline numeric value """

    TYPE = Identifier("number")
    PRIMITIVE_ID = definitions.PRIMITIVE_TYPES.index('number')

    def __init__(self, value, source_ref=None):
        # Numeric literals are always stored as integers
        super().__init__(int(value), source_ref)

    def serialize(self):
        # Primitive type id and value, both little-endian 32-bit signed ints
        return struct.pack('<ii', NumericValue.PRIMITIVE_ID, self.value)
def test_argument_type_mismatch():
    """ Incompatible argument types should raise NoMatchingOverload; castable ones should compile """
    with pytest.raises(NoMatchingOverload) as e:
        pipeline.compile(SourceContext.wrap(BASE.format(code='self.two_args("hello", 3)')))

    assert e.value.methods[0].name == Identifier('two_args')
    assert e.value.arguments == [InlineString("hello"), NumericValue(3)]

    # Can be implicitly casted
    pipeline.compile(SourceContext.wrap(BASE.format(code='self.two_args(3, 3)')))
def from_serialized(cls, data: dict) -> 'SymbolMap':
    """
    Reads a serialized symbol map and returns a new SymbolMap object.
    Additionally, deserializes its symbols into Symbol objects
    """
    symbols = [Symbol.load(entry) for entry in data['symbols']]
    members = [symbol for symbol in symbols if symbol.kind == Symbol.MEMBER]
    methods = [symbol for symbol in symbols if symbol.kind == Symbol.METHOD]

    extends = Symbol.load_identifier(data['extends']) if data['extends'] else None
    generics = [Identifier(generic) for generic in data['generics']]
    offsets = data['offsets']

    return cls(members=members,
               methods=methods,
               name=Identifier(data['name']),
               extends=extends,
               generics=generics,
               convention=Symbol.serialize_convention(data['convention']),
               member_offset=offsets['members'],
               method_offset=offsets['methods'])
class LexicalBoolean(PrimitiveType):
    """ The base boolean type. """

    TYPE = Identifier('bool')
    PRIMITIVE_ID = definitions.PRIMITIVE_TYPES.index('bool')

    def __init__(self, value, source_ref=None):
        super().__init__(value, source_ref)
        # Coerce whatever the parser handed us into a true boolean
        self.value = bool(value)

    def serialize(self):
        # Primitive type id as a 32-bit int, followed by the value as a single byte
        return struct.pack('<iB', LexicalBoolean.PRIMITIVE_ID, self.value)
def test_method_symbol_description():
    """ Method symbols should carry the expected return types, argument lists and indices """
    symbols = get_symbols(SOURCE_FULL)
    person = symbols[Identifier('Person')]
    location = symbols[Identifier('Location')]
    pair = symbols[Identifier('Pair')]

    validate_method(person[Identifier('walk_to')], None, ['Location'], 1)
    validate_method(location[Identifier('distance')], 'number', ['Location', 'Location'], 1, True)
    validate_method(pair[Identifier('set_values')], 'T', ['T', 'T'], 1)