def gen_isa(isa, fmt):
    # First assign numbers to relevant instruction predicates and generate
    # the check_instp() function.
    emit_instps(isa.all_instps, fmt)

    # Level 1 tables, one per CPU mode.
    level1_tables = dict()

    # Tables for enclists with comments.
    seq_table = UniqueSeqTable()
    doc_table = defaultdict(list)

    # Single table containing all the level 2 hash tables.
    level2_hashtables = list()
    level2_doc = defaultdict(list)

    for cpumode in isa.cpumodes:
        level2_doc[len(level2_hashtables)].append(cpumode.name)
        level1 = make_tables(cpumode)
        level1_tables[cpumode] = level1
        encode_enclists(level1, seq_table, doc_table, isa)
        encode_level2_hashtables(level1, level2_hashtables, level2_doc)

    # The level 1 table encodes offsets into the level 2 table.
    level1_offt = offset_type(len(level2_hashtables))
    # The level 2 tables encode offsets into seq_table.
    level2_offt = offset_type(len(seq_table.table))

    emit_enclists(seq_table, doc_table, fmt)
    emit_level2_hashtables(level2_hashtables, level2_offt, level2_doc, fmt)
    for cpumode in isa.cpumodes:
        emit_level1_hashtable(
                cpumode, level1_tables[cpumode], level1_offt, fmt)
    emit_recipe_names(isa, fmt)
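
# The emitted tables form a two-level lookup per CPU mode: a level 1 table
# keyed by the controlling type leads to one of the level 2 hash tables,
# which is keyed by opcode and yields an offset into the shared encoding
# list sequence table. The helper below is only a conceptual sketch of that
# shape, using plain dicts and a hypothetical name; the real consumer is
# the generated Rust, which uses open-addressed hash tables instead.


def sketch_find_enclist(level1, level2_tables, enclists, ctrl_type, opcode):
    """Illustrative two-level lookup over plain dicts (not the real code)."""
    level2_index = level1[ctrl_type]              # level 1: controlling type
    offset = level2_tables[level2_index][opcode]  # level 2: opcode
    return enclists[offset:]                      # tail of the sequence table
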
def gen_descriptors(sgrp, fmt):
    """
    Generate the DESCRIPTORS and ENUMERATORS tables.
    """
    enums = UniqueSeqTable()

    with fmt.indented(
            'static DESCRIPTORS: [detail::Descriptor; {}] = ['
            .format(len(sgrp.settings)),
            '];'):
        for idx, setting in enumerate(sgrp.settings):
            setting.descriptor_index = idx
            with fmt.indented('detail::Descriptor {', '},'):
                fmt.line('name: "{}",'.format(setting.name))
                fmt.line('offset: {},'.format(setting.byte_offset))
                if isinstance(setting, BoolSetting):
                    fmt.line(
                            'detail: detail::Detail::Bool {{ bit: {} }},'
                            .format(setting.bit_offset))
                elif isinstance(setting, NumSetting):
                    fmt.line('detail: detail::Detail::Num,')
                elif isinstance(setting, EnumSetting):
                    offs = enums.add(setting.values)
                    fmt.line(
                            'detail: detail::Detail::Enum '
                            '{{ last: {}, enumerators: {} }},'
                            .format(len(setting.values) - 1, offs))
                else:
                    raise AssertionError("Unknown setting kind")

    with fmt.indented(
            'static ENUMERATORS: [&\'static str; {}] = ['
            .format(len(enums.table)),
            '];'):
        for txt in enums.table:
            fmt.line('"{}",'.format(txt))

    def hash_setting(s):
        return constant_hash.simple_hash(s.name)

    hash_table = constant_hash.compute_quadratic(sgrp.settings, hash_setting)
    with fmt.indented(
            'static HASH_TABLE: [u16; {}] = ['
            .format(len(hash_table)),
            '];'):
        for h in hash_table:
            if h is None:
                fmt.line('0xffff,')
            else:
                fmt.line('{},'.format(h.descriptor_index))
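
# Each descriptor above records where its setting lives in the settings
# byte array: `offset` is a byte index and, for a BoolSetting, `bit` is the
# bit index within that byte. The helper below is only a sketch of that
# addressing scheme under those assumptions; the real accessors are the
# generated Rust, not this Python function.


def sketch_read_bool_setting(flag_bytes, byte_offset, bit_offset):
    """Illustrative read of one boolean flag from the settings bytes."""
    return bool(flag_bytes[byte_offset] & (1 << bit_offset))
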
def gen_isa(isa, fmt):
    # type: (TargetISA, srcgen.Formatter) -> None

    # Make the `RECIPE_PREDICATES` table.
    emit_recipe_predicates(isa, fmt)

    # Make the `INST_PREDICATES` table.
    emit_inst_predicates(isa.instp_number, fmt)

    # Level 1 tables, one per CPU mode.
    level1_tables = dict()

    # Tables for enclists with comments.
    seq_table = UniqueSeqTable()
    doc_table = defaultdict(list)  # type: DefaultDict[int, List[str]]

    # Single table containing all the level 2 hash tables.
    level2_hashtables = list()  # type: List[EncList]
    level2_doc = defaultdict(list)  # type: DefaultDict[int, List[str]]

    for cpumode in isa.cpumodes:
        level2_doc[len(level2_hashtables)].append(cpumode.name)
        level1 = make_tables(cpumode)
        level1_tables[cpumode] = level1
        encode_enclists(level1, seq_table, doc_table, isa)
        encode_level2_hashtables(level1, level2_hashtables, level2_doc)

    # The level 1 table encodes offsets into the level 2 table.
    level1_offt = offset_type(len(level2_hashtables))
    # The level 2 tables encode offsets into seq_table.
    level2_offt = offset_type(len(seq_table.table))

    emit_enclists(seq_table, doc_table, fmt)
    emit_level2_hashtables(level2_hashtables, level2_offt, level2_doc, fmt)
    for cpumode in isa.cpumodes:
        emit_level1_hashtable(
                cpumode, level1_tables[cpumode], level1_offt, fmt)
    emit_recipe_names(isa, fmt)
    emit_recipe_constraints(isa, fmt)
    emit_recipe_sizing(isa, fmt)

    # Finally, tie it all together in an `EncInfo`.
    with fmt.indented('pub static INFO: isa::EncInfo = isa::EncInfo {', '};'):
        fmt.line('constraints: &RECIPE_CONSTRAINTS,')
        fmt.line('sizing: &RECIPE_SIZING,')
        fmt.line('names: &RECIPE_NAMES,')
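
# gen_isa() relies on two helpers defined elsewhere in the generator
# sources: a deduplicating sequence table whose add() returns the offset of
# a stored run, and offset_type(), which names a Rust integer type wide
# enough to index a table of the given length. The versions below are
# minimal sketches written for illustration only (note the Sketch/sketch_
# prefixes); the real definitions may differ in detail.


class SketchUniqueSeqTable(object):
    """Deduplicating sequence table: add() returns the run's offset."""

    def __init__(self):
        self.table = []
        self.index = dict()  # maps a tuple run to its starting offset

    def add(self, seq):
        seq = tuple(seq)
        if seq not in self.index:
            self.index[seq] = len(self.table)
            self.table.extend(seq)
        return self.index[seq]


def sketch_offset_type(length):
    """Smallest unsigned Rust type that can index `length` entries."""
    if length <= 0x10000:
        return 'u16'
    assert length <= 0x100000000, "Table too large"
    return 'u32'
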
def gen_type_constraints(fmt, instrs):
    # type: (srcgen.Formatter, Sequence[Instruction]) -> None
    """
    Generate value type constraints for all instructions.

    - Emit a compact constant table of ValueTypeSet objects.
    - Emit a compact constant table of OperandConstraint objects.
    - Emit an opcode-indexed table of instruction constraints.
    """
    # Table of TypeSet instances.
    type_sets = UniqueTable()

    # Table of operand constraint sequences (as tuples). Each operand
    # constraint is represented as a string, one of:
    # - `Concrete(vt)`, where `vt` is a value type name.
    # - `Free(idx)`, where `idx` is an index into `type_sets`.
    # - `Same`, `Lane`, `AsBool` for controlling typevar-derived constraints.
    operand_seqs = UniqueSeqTable()

    # Preload table with constraints for typical binops.
    operand_seqs.add(['Same'] * 3)

    fmt.comment('Table of opcode constraints.')
    with fmt.indented(
            'const OPCODE_CONSTRAINTS: [OpcodeConstraints; {}] = ['
            .format(len(instrs)),
            '];'):
        for i in instrs:
            # Collect constraints for the value results, not including
            # `variable_args` results which are always special cased.
            constraints = list()
            ctrl_typevar = None
            ctrl_typeset = typeset_limit
            if i.is_polymorphic:
                ctrl_typevar = i.ctrl_typevar
                ctrl_typeset = type_sets.add(ctrl_typevar.type_set)

            for idx in i.value_results:
                constraints.append(
                        get_constraint(i.outs[idx], ctrl_typevar, type_sets))

            for opnum in i.value_opnums:
                constraints.append(
                        get_constraint(i.ins[opnum], ctrl_typevar, type_sets))

            offset = operand_seqs.add(constraints)
            fixed_results = len(i.value_results)
            fixed_values = len(i.value_opnums)
            # Can the controlling type variable be inferred from the
            # designated operand?
            use_typevar_operand = i.is_polymorphic and i.use_typevar_operand
            # Can the controlling type variable be inferred from the result?
            use_result = (fixed_results > 0 and
                          i.outs[i.value_results[0]].typevar == ctrl_typevar)
            # Are we required to use the designated operand instead of the
            # result?
            requires_typevar_operand = use_typevar_operand and not use_result
            fmt.comment(
                    '{}: fixed_results={}, use_typevar_operand={}, '
                    'requires_typevar_operand={}, fixed_values={}'
                    .format(i.camel_name, fixed_results, use_typevar_operand,
                            requires_typevar_operand, fixed_values))
            fmt.comment('Constraints={}'.format(constraints))
            if i.is_polymorphic:
                fmt.comment(
                        'Polymorphic over {}'.format(ctrl_typevar.type_set))

            # Compute the bit field encoding, c.f. instructions.rs.
            assert fixed_results < 8, "Bit field encoding too tight"
            flags = fixed_results
            if use_typevar_operand:
                flags |= 8
            if requires_typevar_operand:
                flags |= 0x10
            assert fixed_values < 8, "Bit field encoding too tight"
            flags |= fixed_values << 5

            with fmt.indented('OpcodeConstraints {', '},'):
                fmt.line('flags: {:#04x},'.format(flags))
                fmt.line('typeset_offset: {},'.format(ctrl_typeset))
                fmt.line('constraint_offset: {},'.format(offset))

    fmt.line()
    gen_typesets_table(fmt, type_sets)
    fmt.line()

    fmt.comment('Table of operand constraint sequences.')
    with fmt.indented(
            'const OPERAND_CONSTRAINTS: [OperandConstraint; {}] = ['
            .format(len(operand_seqs.table)),
            '];'):
        for c in operand_seqs.table:
            fmt.line('OperandConstraint::{},'.format(c))
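
# gen_type_constraints() packs several per-opcode fields into the single
# `flags` byte emitted above:
#   bits 0-2  fixed_results             (asserted < 8)
#   bit  3    use_typevar_operand
#   bit  4    requires_typevar_operand
#   bits 5-7  fixed_values              (asserted < 8)
# The decoder below only documents that layout; the real consumer is the
# OpcodeConstraints type in instructions.rs on the Rust side.


def sketch_decode_constraint_flags(flags):
    """Illustrative inverse of the bit-field packing performed above."""
    return {
        'fixed_results': flags & 0x7,
        'use_typevar_operand': bool(flags & 0x8),
        'requires_typevar_operand': bool(flags & 0x10),
        'fixed_values': (flags >> 5) & 0x7,
    }
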
def gen_descriptors(sgrp, fmt):
    # type: (SettingGroup, srcgen.Formatter) -> None
    """
    Generate the DESCRIPTORS, ENUMERATORS, and PRESETS tables.
    """
    enums = UniqueSeqTable()

    with fmt.indented(
            'static DESCRIPTORS: [detail::Descriptor; {}] = ['
            .format(len(sgrp.settings) + len(sgrp.presets)),
            '];'):
        for idx, setting in enumerate(sgrp.settings):
            setting.descriptor_index = idx
            with fmt.indented('detail::Descriptor {', '},'):
                fmt.line('name: "{}",'.format(setting.name))
                fmt.line('offset: {},'.format(setting.byte_offset))
                if isinstance(setting, BoolSetting):
                    fmt.line(
                            'detail: detail::Detail::Bool {{ bit: {} }},'
                            .format(setting.bit_offset))
                elif isinstance(setting, NumSetting):
                    fmt.line('detail: detail::Detail::Num,')
                elif isinstance(setting, EnumSetting):
                    offs = enums.add(setting.values)
                    fmt.line(
                            'detail: detail::Detail::Enum '
                            '{{ last: {}, enumerators: {} }},'
                            .format(len(setting.values) - 1, offs))
                else:
                    raise AssertionError("Unknown setting kind")

        for idx, preset in enumerate(sgrp.presets):
            preset.descriptor_index = len(sgrp.settings) + idx
            with fmt.indented('detail::Descriptor {', '},'):
                fmt.line('name: "{}",'.format(preset.name))
                fmt.line('offset: {},'.format(idx * sgrp.settings_size))
                fmt.line('detail: detail::Detail::Preset,')

    with fmt.indented(
            'static ENUMERATORS: [&str; {}] = ['
            .format(len(enums.table)),
            '];'):
        for txt in enums.table:
            fmt.line('"{}",'.format(txt))

    def hash_setting(s):
        # type: (Union[Setting, Preset]) -> int
        return constant_hash.simple_hash(s.name)

    hash_elems = []  # type: List[Union[Setting, Preset]]
    hash_elems.extend(sgrp.settings)
    hash_elems.extend(sgrp.presets)
    hash_table = constant_hash.compute_quadratic(hash_elems, hash_setting)
    with fmt.indented(
            'static HASH_TABLE: [u16; {}] = ['
            .format(len(hash_table)),
            '];'):
        for h in hash_table:
            if h is None:
                fmt.line('0xffff,')
            else:
                fmt.line('{},'.format(h.descriptor_index))

    with fmt.indented(
            'static PRESETS: [(u8, u8); {}] = ['
            .format(len(sgrp.presets) * sgrp.settings_size),
            '];'):
        for preset in sgrp.presets:
            fmt.comment(preset.name)
            for mask, value in preset.layout():
                fmt.format('(0b{:08b}, 0b{:08b}),', mask, value)
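
# For a hypothetical setting group with one boolean setting and one preset,
# the function above would emit Rust tables shaped roughly like the
# following. Every name and value here is invented for illustration; real
# output depends on the group's settings, presets, and hash placement.
#
#   static DESCRIPTORS: [detail::Descriptor; 2] = [
#       detail::Descriptor {
#           name: "enable_simd",
#           offset: 0,
#           detail: detail::Detail::Bool { bit: 0 },
#       },
#       detail::Descriptor {
#           name: "baseline",
#           offset: 0,
#           detail: detail::Detail::Preset,
#       },
#   ];
#   static ENUMERATORS: [&str; 0] = [];
#   static HASH_TABLE: [u16; 4] = [...];  // 0xffff = empty slot, otherwise a
#                                         // descriptor index; placement
#                                         // depends on simple_hash().
#   static PRESETS: [(u8, u8); 1] = [
#       (0b00000001, 0b00000001), // baseline
#   ];
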
def gen_type_constraints(fmt, instrs):
    """
    Generate value type constraints for all instructions.

    - Emit a compact constant table of ValueTypeSet objects.
    - Emit a compact constant table of OperandConstraint objects.
    - Emit an opcode-indexed table of instruction constraints.
    """
    # Table of TypeSet instances.
    type_sets = UniqueTable()

    # Table of operand constraint sequences (as tuples). Each operand
    # constraint is represented as a string, one of:
    # - `Concrete(vt)`, where `vt` is a value type name.
    # - `Free(idx)`, where `idx` is an index into `type_sets`.
    # - `Same`, `Lane`, `AsBool` for controlling typevar-derived constraints.
    operand_seqs = UniqueSeqTable()

    # Preload table with constraints for typical binops.
    operand_seqs.add(['Same'] * 3)

    # TypeSet indexes are encoded in 8 bits, with `0xff` reserved.
    typeset_limit = 0xff

    fmt.comment('Table of opcode constraints.')
    with fmt.indented(
            'const OPCODE_CONSTRAINTS : [OpcodeConstraints; {}] = ['
            .format(len(instrs)),
            '];'):
        for i in instrs:
            # Collect constraints for the value results, not including
            # `variable_args` results which are always special cased.
            constraints = list()
            ctrl_typevar = None
            ctrl_typeset = typeset_limit
            if i.is_polymorphic:
                ctrl_typevar = i.ctrl_typevar
                ctrl_typeset = type_sets.add(ctrl_typevar.type_set)

            for idx in i.value_results:
                constraints.append(
                        get_constraint(i.outs[idx], ctrl_typevar, type_sets))

            for idx in i.format.value_operands:
                constraints.append(
                        get_constraint(i.ins[idx], ctrl_typevar, type_sets))

            offset = operand_seqs.add(constraints)
            fixed_results = len(i.value_results)
            use_typevar_operand = i.is_polymorphic and i.use_typevar_operand
            fmt.comment(
                    '{}: fixed_results={}, use_typevar_operand={}'
                    .format(i.camel_name, fixed_results, use_typevar_operand))
            fmt.comment('Constraints={}'.format(constraints))
            if i.is_polymorphic:
                fmt.comment(
                        'Polymorphic over {}'.format(ctrl_typevar.type_set))

            # Compute the bit field encoding, c.f. instructions.rs.
            assert fixed_results < 8, "Bit field encoding too tight"
            flags = fixed_results
            if use_typevar_operand:
                flags |= 8

            with fmt.indented('OpcodeConstraints {', '},'):
                fmt.line('flags: {:#04x},'.format(flags))
                fmt.line('typeset_offset: {},'.format(ctrl_typeset))
                fmt.line('constraint_offset: {},'.format(offset))

    fmt.comment('Table of value type sets.')
    assert len(type_sets.table) <= typeset_limit, "Too many type sets"
    with fmt.indented(
            'const TYPE_SETS : [ValueTypeSet; {}] = ['
            .format(len(type_sets.table)),
            '];'):
        for ts in type_sets.table:
            with fmt.indented('ValueTypeSet {', '},'):
                ts.emit_fields(fmt)

    fmt.comment('Table of operand constraint sequences.')
    with fmt.indented(
            'const OPERAND_CONSTRAINTS : [OperandConstraint; {}] = ['
            .format(len(operand_seqs.table)),
            '];'):
        for c in operand_seqs.table:
            fmt.line('OperandConstraint::{},'.format(c))
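
# In this older variant, typeset_limit (0xff) doubles as the typeset_offset
# stored for non-polymorphic instructions, so a consumer can recover the
# polymorphism flag by comparing against it. The helper below is a sketch
# of that convention only; the real check happens on the Rust side.


def sketch_is_polymorphic_opcode(typeset_offset, typeset_limit=0xff):
    """Illustrative test: the reserved offset means 'not polymorphic'."""
    return typeset_offset != typeset_limit
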