Example #1
File: base.py Project: nak/pyllars
 def guarded(self, stream : TextIOBase):
     stream.write(("""
         #ifndef __%(guard)s__
         #define __%(guard)s__
 
         """ % {'guard': self.element.guard}).encode('utf-8'))
     yield stream
     stream.write(b"""
         #endif
     """)
Example #2
def diff(result_file: io.TextIOBase, answer_file: io.TextIOBase) -> bool:
    res_lines = result_file.readlines()
    ans_lines = answer_file.readlines()

    if len(res_lines) != len(ans_lines):
        return False

    for res_line, ans_line in zip(res_lines, ans_lines):
        if res_line != ans_line:
            return False
    return True
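A quick usage sketch, assuming the diff function above is in scope; io.StringIO stands in for the two opened text files.

import io

result = io.StringIO("1 2 3\n4 5 6\n")
answer = io.StringIO("1 2 3\n4 5 6\n")
print(diff(result, answer))  # True: same line count and identical lines

answer_short = io.StringIO("1 2 3\n")
result.seek(0)  # rewind, readlines() above consumed the stream
print(diff(result, answer_short))  # False: different number of lines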
Example #3
File: base.py Project: nak/pyllars
    def _ns_scope(self, stream: TextIOBase):
        need_closure = False
        if self.parent:
            # recurse upward
            with self.parent._ns_scope(stream) as scoped:
                if not self.element.is_template_macro and not isinstance(self.element, (code_structure.FieldDecl,
                                                                                        code_structure.CXXMethodDecl,
                                                                                        code_structure.BuiltinType,
                                                                                        code_structure.TypedefDecl)):
                    need_closure = True
                    scoped.write(("\nnamespace %s{\n" % qualified_name(self.element.name)).encode("utf-8"))
                yield stream
        else:
            # at top level
            stream.write(b"\nnamespace pyllars{\n")
            if not self.element.is_template_macro and not isinstance(self.element, (code_structure.FieldDecl,
                                                                                    code_structure.CXXMethodDecl,
                                                                                    code_structure.BuiltinType,
                                                                                    code_structure.TypedefDecl)):
                need_closure = True
                stream.write(("\nnamespace %s{\n" % qualified_name(self.element.name)).encode("utf-8"))
            yield stream
            stream.write(b"\n}")

        if need_closure:
            stream.write(b"\n}")
Example #4
File: base.py Project: nak/pyllars
 def generate_header_core(self, stream: TextIOBase, as_top=False):
     if self.element.is_anonymous_type:  # anonymous directly inaccessible type
         stream.write(b"")
         return
     stream.write(("""
             status_t %(basic_name)s_register( pyllars::Initializer* const);
             
             status_t %(basic_name)s_init(PyObject * const global_mod);
           
         """ % {
             'name': self.element.name,
             'basic_name': self.sanitize(self.element.name or "anonymous-%s" % self.element.tag),
             'parent_basic_name': self.element.parent.name if (self.element.parent and self.element.parent.name) else "pyllars",
             'pyllars_scope': self.element.pyllars_scope,
             'parent_name': self.element.parent.name if self.element.parent else "pyllars",
     }).encode('utf-8'))
Example #5
	def read_file_handle(self, handle: io.TextIOBase) -> 'None':
		lines = handle.readlines()
		""":type : list[str]"""
		for line in lines:
			splitline = line.rstrip().split("\t")
			if len(splitline) != 2:
				continue
			self.counts[splitline[0]] = int(splitline[1])
		return
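A short sketch of feeding the method tab-separated counts. It only touches self.counts, so a SimpleNamespace serves as a hypothetical stand-in for the real object, assuming the function above is reachable at module level.

import io
from types import SimpleNamespace

holder = SimpleNamespace(counts={})  # hypothetical stand-in for the real object
read_file_handle(holder, io.StringIO("apple\t3\nbanana\t5\nnot a count line\n"))
print(holder.counts)  # {'apple': 3, 'banana': 5}; lines without exactly two fields are skipped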
Example #6
def char_splitter(buffer: io.TextIOBase, delim):
    buf = ""
    eof = False
    while True:
        chunk = buffer.readline(1024)
        if not chunk:
            eof = True

        buf += chunk
        split = buf.split(delim)
        if len(split) > 0:
            if split[-1].endswith(delim) or eof:
                for s in split:
                    yield s
                buf = ""
            else:
                for s in split[0:-1]:
                    yield s
                buf = split[-1]

        if eof:
            break
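A usage sketch, assuming char_splitter above is in scope; any text stream works, so io.StringIO is used here.

import io

stream = io.StringIO("alpha,beta,gamma")
print(list(char_splitter(stream, ",")))  # ['alpha', 'beta', 'gamma']

Note that readline(1024) caps each read at 1024 characters, so long delimited records are accumulated in buf across iterations before being split.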
Example #7
def regex_splitter(buffer: io.TextIOBase, pattern):
    buf = ""
    eof = False

    regex = re.compile(pattern)
    while True:
        chunk = buffer.readline(1024)
        if not chunk:
            eof = True

        buf += chunk
        split = regex.split(buf)
        if len(split) > 0:
            if regex.match(split[-1]) or eof:
                for s in split:
                    yield s
                buf = ""
            else:
                for s in split[0:-1]:
                    yield s
                buf = split[-1]

        if eof:
            break
Example #8
 def __init__(self, buf, tag):
     TextIOBase.__init__(self)
     self.buf = buf
     self.tag = tag
Example #9
 def _to_stream(data: str, stream: TextIOBase) -> str:
     stream.write(data)
     return data
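_to_stream is a small tee helper: it forwards data to the stream and hands the same string back, which makes it easy to chain. A tiny check of that pass-through behaviour, assuming it is reachable as a plain function:

import io

buf = io.StringIO()
line = _to_stream("hello world\n", buf)
assert line == buf.getvalue() == "hello world\n"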
Example #10
def translate_statement(f: ast.stmt,
                        dst: io.TextIOBase,
                        vars: set,
                        indent="") -> None:
    unq = random.randint(0, 2**32)
    if isinstance(f, ast.Assign):
        assert len(f.targets) == 1, "Can't do tuple assignment."
        # Auto-declare direct scalars the first time they are assigned
        # TODO : This is buggy, as if a variable is first referenced in a local scope, it won't
        # be available outside
        decl = ""
        if isinstance(f.targets[0], ast.Name):
            if f.targets[0].id not in vars:
                decl = "auto "
                vars.add(f.targets[0].id)
        dst.write("{}{}{} = {};\n".format(indent, decl,
                                          translate_expression(f.targets[0]),
                                          translate_expression(f.value)))
    elif isinstance(f, ast.Pass):
        pass
    elif isinstance(f, ast.AugAssign):
        op = translate_operator(f.op)
        dst.write("{}{} {}= {};\n".format(indent,
                                          translate_expression(f.target), op,
                                          translate_expression(f.value)))
    elif isinstance(f, ast.If):
        dst.write("{}{{\n".format(indent))
        dst.write("{}  bool cond_{}=({});\n".format(
            indent, unq, translate_expression(f.test)))
        dst.write("{}  if(cond_{}){{\n".format(indent, unq))
        for s in f.body:
            translate_statement(s, dst, vars, indent + "    ")
        dst.write("{}  }}else{{\n".format(indent))
        for s in f.orelse:
            translate_statement(s, dst, vars, indent + "    ")
        dst.write("{}  }}\n".format(indent))
        dst.write("{}}}\n".format(indent))
    elif isinstance(f, ast.For):
        var = extract_loop_var(f.target)
        count = extract_loop_range(f.iter)
        dst.write("{}for(unsigned {var}=0; {var}<{count}; {var}++){{\n".format(
            indent, var=var, count=count))
        for s in f.body:
            translate_statement(s, dst, vars, indent + "  ")
        assert len(f.orelse) == 0  # Who uses this construct???
        dst.write("{}}}\n".format(indent))
    elif isinstance(f, ast.Expr):
        dst.write("{}{};\n".format(indent, translate_expression(f.value)))
    elif isinstance(f, ast.Assert):
        dst.write("{}assert({});\n".format(indent,
                                           translate_expression(f.test)))
    else:
        raise RuntimeError("unsupported statement {}".format(type(f)))
Example #11
 def save(self, to: TextIOBase):
     return to.write(self.json(by_alias=True, indent=4, exclude_none=True))
Example #12
    def ingest(self, f: TextIOBase, *, unresolved_hold_notes=None):
        if unresolved_hold_notes is None:
            unresolved_hold_notes = set()
        # key is a tuple of note circle and board position
        # value is the following note circle it is resolved by
        hold_notes_resolved_by = {note: None for note in unresolved_hold_notes}
        note_positions_in_board = dict()
        note_positions_in_time = dict()
        # while set(note_positions_in_board.keys()) != set(note_positions_in_time.keys()) or not note_positions_in_board:
        lines = []
        board_lines_seen = 0
        rhythm_lines_seen = 0
        while board_lines_seen < 4 or rhythm_lines_seen < 4:
            l_raw = f.readline()
            if not l_raw:
                return None, unresolved_hold_notes
            l = l_raw.strip()
            if not l:
                continue
            if (l[0] in self.CharacterMap.POSITION_MAP
                    or l[0] in self.CharacterMap.NOTE_MAP):
                lines.append(l)
                board_lines_seen += 1
                if self.CharacterMap.NAMES["BAR"] in l:
                    rhythm_lines_seen += 1
        for l, line in enumerate(lines):
            pattern_section = line[:4]
            for i, note in enumerate(pattern_section):
                if note in self.CharacterMap.NOTE_MAP:
                    # dont append yet: we are not sure if this is actually end of hold note
                    unresolved_hold_notes = {
                        hold_note
                        for hold_note, v in hold_notes_resolved_by.items()
                        if v is None
                    }
                    position = ((l % 4) * 4) + i
                    is_ending_hold = False
                    for hold_note_start, hold_position in unresolved_hold_notes:
                        if position == hold_position:
                            # it is ending the previous hold note!
                            is_ending_hold = True
                            hold_notes_resolved_by[(hold_note_start,
                                                    position)] = note
                            break
                    if not is_ending_hold:
                        positions = note_positions_in_board.get(note, list())
                        positions.append(position)
                        note_positions_in_board[note] = positions
                else:
                    continue
                # is it a hold note?
                # check left
                is_hold_note = False
                if i > 0:
                    valid_chars = {
                        self.CharacterMap.NAMES["HOLD_STEM_HORIZONTAL"],
                        self.CharacterMap.NAMES["HOLD_DIRECTION_RIGHT"],
                    }
                    if pattern_section[i - 1] in valid_chars:
                        is_hold_note = True
                # check right
                if i < 3:
                    valid_chars = {
                        self.CharacterMap.NAMES["HOLD_STEM_HORIZONTAL"],
                        self.CharacterMap.NAMES["HOLD_DIRECTION_LEFT"],
                    }
                    if pattern_section[i + 1] in valid_chars:
                        is_hold_note = True
                # check above
                if l % 4 > 0:
                    valid_chars = {
                        self.CharacterMap.NAMES["HOLD_STEM_VERTICAL"],
                        self.CharacterMap.NAMES["HOLD_DIRECTION_DOWN"],
                    }
                    above_pattern_section = lines[l - 1][:4]
                    if above_pattern_section[i] in valid_chars:
                        is_hold_note = True
                # check below
                if l % 4 < 3:
                    valid_chars = {
                        self.CharacterMap.NAMES["HOLD_STEM_VERTICAL"],
                        self.CharacterMap.NAMES["HOLD_DIRECTION_UP"],
                    }
                    below_pattern_section = lines[l + 1][:4]
                    if below_pattern_section[i] in valid_chars:
                        is_hold_note = True
                if is_hold_note:
                    position = ((l % 4) * 4) + i
                    # marked as unresolved
                    hold_notes_resolved_by[(note, position)] = None

            try:
                rhythm_section = line[4:].strip()
                increment_size = round(1 / (len(rhythm_section) - 2), 2)
                for t, note in enumerate(rhythm_section[1:-1]):
                    if note in self.CharacterMap.NOTE_MAP:
                        beat_number = self.current_beat + (l % 4) + (
                            t * increment_size)
                        note_positions_in_time[note] = beat_number
            except IndexError:
                pass
        patterns = []
        for note, local_beat in note_positions_in_time.items():
            p = Pattern()
            for position in note_positions_in_board[note]:
                if (note, position) in hold_notes_resolved_by:
                    p.add_hold(position)
                    continue
                else:
                    p.add(position)
            for key, hold_end_note in hold_notes_resolved_by.items():
                hold_start_note, hold_position = key
                hold_start_beat = note_positions_in_time.get(
                    hold_start_note, -1)
                hold_end_beat = note_positions_in_time.get(
                    hold_end_note, local_beat)
                if hold_start_beat < local_beat <= hold_end_beat:
                    p.add_hold_tick(hold_position)
            patterns.append((local_beat, p))
        patterns.sort(key=lambda i: i[0])
        unresolved_hold_notes = {
            hold_note
            for hold_note, v in hold_notes_resolved_by.items() if v is None
        }
        return patterns, unresolved_hold_notes
Example #13
 def writeline(writer: io.TextIOBase, line=None, noindent=False):
     # if line:
     #     writer.write(('' if noindent else (indentstr*indent)) + line)
     write(line, noindent)
     writer.write('\n')
Example #14
 def stream2selection(self, stream: TextIOBase) -> Selection:
     return self.str2selection(stream.read())
Example #15
def split_seq(length, header, sequence, stream: io.TextIOBase):
	"""Split sequence around N regions of given minimum length.
	Sub-sequence headers are appended with "_part_#_<start-end>"
	NOTE: assumes there is an outfile to write to!
	"""
	# Generate a pattern:
	pattern_string = "N{%d,}" % length
	pattern = re.compile(pattern_string)
	
	# Get an iterator containing all (non-overlapping) matches for pattern:
	n_regions = list(pattern.finditer(sequence))
	
	last_end = 0	# End of last match; starts at beginning of sequence
	count = 0
	if n_regions:
		# Split sequence around N regions of specified length
		for r in n_regions:
			# Get coordinates of N region
			(start, end) = r.span(0)
			# Get subsequence from end of last match to start of this
			subseq = sequence[last_end:start]
			# Avoid printing empty sequences:
			if len(subseq) > 0:
				# Print subsequence FASTA record
				# NOTE: uses a TextIOWrapper, provided by argparser, to write out
				# NOTE: ASSUMES THERE IS A FILE TO WRITE TO
				stream.write("%s_part_%d_coords_%d-%d_newlen_%d\n" % (header, count, last_end, start, start-last_end))
				stream.write(subseq + "\n")
				# Update subsequence count
				count += 1
			last_end = end
			
		# Write final subsequence (end of last match to end of sequence)
		subseq = sequence[last_end:]
		# Again, avoid printing empty subsequences:
		if len(subseq) > 0:
			stream.write("%s_part_%d_coords_%d-%d_newlen_%d\n" % (header, count, last_end, len(sequence), len(sequence)-last_end))
			stream.write(subseq + "\n")
	else:
		# No sufficiently long N regions: just print sequence
		stream.write(header + "\n")
		stream.write(sequence + "\n")
		pass
Example #16
def render_graph_instance_as_opencl(gi: GraphInstance, dst: io.TextIOBase):
    gt = gi.graph_type

    #################################################################
    ## Initial hacking to fix topology params
    device_types_by_index = list(gt.device_types.values())
    devs = list(gi.device_instances.values())
    for (i, d) in enumerate(devs):
        # Naughty: hacking in the address
        d.address = i
        d.device_type_index = device_types_by_index.index(d.device_type)

    incoming_edges = {}  # Map of dst_address -> [ edge_instance ]
    outgoing_edges = {}  # Map of (src_address,src_pin) -> [ edge_instance ]

    for ei in gi.edge_instances.values():  # type: EdgeInstance
        incoming = incoming_edges.setdefault(ei.dst_device.id, [])
        outgoing = outgoing_edges.setdefault(
            (ei.src_device.id, ei.src_pin.name), [])
        incoming_index = len(incoming)
        ei.incoming_index = incoming_index  # Naughty: patch it in
        incoming.append(ei)
        outgoing.append(ei)

    #####################################################################
    # Write out all the landing zones
    for d in devs:  # type: DeviceType
        dst.write(f"""
            message_payload_t {d.id}__payloads[{len(d.device_type.outputs_by_index)}];
            POETS_ATOMIC_INT {d.id}__ref_counts[{len(d.device_type.outputs_by_index)}] = {{ { ",".join(["POETS_ATOMIC_VAR_INIT(0)" for i in range(len(d.device_type.outputs_by_index))]) } }};     

            """)

    ######################################################################
    # Do incoming edges
    for d in devs:
        incoming = incoming_edges.get(d.id, [])

        ## First handle any properties/state

        for ei in incoming:
            if ei.properties != None:
                dst.write(f"""
                    const {ei.dst_device.device_type.id}_{ei.dst_pin.name}_properties_t {ei.dst_device.id}_{ei.dst_pin.name}_{ei.incoming_index}__properties = 
                        { typed_struct_instance_as_init(ei.dst_pin.properties,ei.properties) };
                    """)
            if ei.state != None:
                dst.write(f"""
                    const {ei.dst_device.device_type.id}_{ei.dst_pin.name}_state_t {ei.dst_device.id}_{ei.dst_pin.name}_{ei.incoming_index}__state = 
                        { typed_struct_instance_as_init(ei.dst_pin.state,ei.state) };
                    """)

        ## Then deal with hookups into graph

        make_incoming_entry = lambda ei: f"""
            {{
                {ei.dst_pin.index}, //uint pin_index;
                {ei.src_device.id}__payloads + {ei.src_pin.index}, //message_payload_t *payload;
                {ei.src_device.id}__ref_counts + {ei.src_pin.index}, //uint *ref_count;
                {"0" if ei.properties==None else f"&{ei.dst_device.id}_{ei.dst_pin.name}_{ei.incoming_index}__properties" }, //const void *edge_properties;
                {"0" if ei.state==None else f"&{ei.dst_device.id}_{ei.dst_pin.name}_{ei.incoming_index}__state" } //void *edge_state;
            }}
            """

        dst.write(f"""
            incoming_edge {d.id}__incoming_edges[{len(incoming)}] =
            {{
                {
                    ",".join([ make_incoming_entry(ei) for ei in incoming ])
                }
            }};
            POETS_ATOMIC_INT {d.id}__incoming_landing_bits[{len(incoming)}] = {{ 0 }};
            """)
        incoming = None

    ####################################################################
    # do outgoing edges and ports
    for d in devs:
        # Each outgoing edge identifies a bit to set in the target bit mask
        make_outgoing_entry = lambda ei: f"""{{ {ei.dst_device.id}__incoming_landing_bits, {ei.incoming_index} }}\n"""

        for op in d.device_type.outputs_by_index:
            outgoing = outgoing_edges.get((d.id, op.name), [])
            dst.write(f"""
                outgoing_edge {d.id}_p{op.index}__outgoing_edges[] = {{
                    {",".join(make_outgoing_entry(ei) for ei in outgoing)}
                }};
                """)
            outgoing = None

        make_outgoing_port = lambda op: f"""
        {{
            {len( outgoing_edges.get( (d.id,op.name), []) ) }, //unsigned num_outgoing_edges;
            {d.id}_p{op.index}__outgoing_edges, //outgoing_edge *outgoing_edges;
            {d.id}__payloads+{op.index}, //message_payload_t *payload;
        }}
        """

        dst.write(f"""
output_pin {d.id}__output_pins[{len(d.device_type.outputs)}] =
{{
    {",".join(make_outgoing_port(op) for op in d.device_type.outputs_by_index)}
}};
        """)

    ##################################################################################
    ## Properties and state
    for d in devs:
        dst.write(
            f"{gt.id}_{d.device_type.id}_properties_t {d.id}__properties={ typed_struct_instance_as_init(d.device_type.properties, d.properties) };\n"
        )
        dst.write(
            f"{gt.id}_{d.device_type.id}_state_t {d.id}__state={ typed_struct_instance_as_init(d.device_type.state, d.state) };\n"
        )

    #####################################################################################
    ## Device info

    dst.write("__global device_info devices[]={\n")
    for (i, d) in enumerate(devs):
        if i != 0:
            dst.write(",\n")
        dst.write(f"""
    {{
        {d.address}, // address
        {len( incoming_edges.get(d.id,[]) ) }, //uint num_incoming_edges;
        {d.id}__incoming_edges, //incoming_edge *incoming_edges;  // One entry per incoming edge
        {d.id}__incoming_landing_bits, //uint *incoming_landing_bit_mask; // One bit per incoming edge (keep globally mutable separate from local)

        {len(d.device_type.outputs)}, //unsigned num_output_pins;
        {d.id}__output_pins, //const output_pin *output_pins;  // One entry per pin
        {d.id}__ref_counts, //uint *output_ref_counts; // One counter per pin (keep globally mutable separate from local )

        {d.device_type_index}, //unsigned device_type_index;
        &{d.id}__properties, //const void *device_properties;
        &{d.id}__state //void *device_state;
    }}    
""")
    dst.write("};\n")

    dst.write(f"""
    {gt.id}_properties_t G_properties={typed_struct_instance_as_init(gi.graph_type.properties, gi.properties)};

    __kernel void kinit()
{{
    init(&G_properties, devices);
}}

__kernel void kstep(unsigned count)
{{
    for(unsigned i=0; i<count;i++){{
        step(&G_properties, devices);
    }}
}}

const uint __TOTAL_DEVICES__={len(devs)};
""")
Example #17
 def print(self, target: Target, file: TextIOBase) -> None:
     file.write(target.path)
     file.write(':')
     for prerequisite in target.prerequisites:
         file.write(' ')
         file.write(prerequisite)
     first_oo_prerequisite = True
     for oo_prerequisite in target.order_only_prerequisites:
         if first_oo_prerequisite:
             file.write(' |')
             first_oo_prerequisite = False
         file.write(' ')
         file.write(oo_prerequisite)
     has_recipe_lines = False
     for recipe_line in target.recipe_lines:
         file.write('\n\t')
         file.write(recipe_line)
         if not has_recipe_lines:
             has_recipe_lines = True
     if not has_recipe_lines:
         file.write(' ;')
     file.write('\n')
Example #18
def render_graph_type_as_opencl(gt: GraphType, dst: io.TextIOBase):
    ##############################################################
    ## Patch in stdint.h types

    dst.write("""
typedef char int8_t;
typedef unsigned char uint8_t;
typedef short int16_t;
typedef unsigned short uint16_t;
typedef int int32_t;
typedef unsigned int uint32_t;
typedef long int64_t;
typedef unsigned long uint64_t;


""")

    ###############################################################
    ## Do all the structs

    render_typed_data_spec_as_struct(gt.properties, f"{gt.id}_properties_t",
                                     dst)
    for mt in gt.message_types.values():  # type:MessageType
        render_typed_data_spec_as_struct(mt.message, f"{mt.id}_message_t", dst)

    for dt in gt.device_types.values():  # type:DeviceType
        render_typed_data_spec_as_struct(dt.properties,
                                         f"{gt.id}_{dt.id}_properties_t", dst)
        render_typed_data_spec_as_struct(dt.state, f"{gt.id}_{dt.id}_state_t",
                                         dst)
        for ip in dt.inputs_by_index:
            render_typed_data_spec_as_struct(
                ip.properties, f"{dt.id}_{ip.name}_properties_t", dst)
            render_typed_data_spec_as_struct(ip.state,
                                             f"{dt.id}_{ip.name}_state_t", dst)

    dst.write(f"typedef {gt.id}_properties_t GRAPH_PROPERTIES_T;")

    #################################################################
    ## RTS flag enums

    for dt in gt.device_types.values():  # type:DeviceType
        dst.write(f"enum {dt.id}_RTS_FLAGS {{")
        dst.write(",".join(f"RTS_FLAG_{dt.id}_{op.name} = 1<<{i}"
                           for (i, op) in enumerate(dt.outputs_by_index)))
        if len(dt.outputs_by_index) == 0:
            dst.write(" _fake_RTS_FLAG_to_avoid_emptyness_")
        dst.write("};\n\n")

        dst.write(f"enum {dt.id}_RTS_INDEX {{")
        dst.write(",".join(f"RTS_INDEX_{dt.id}_{op.name} = {i}"
                           for (i, op) in enumerate(dt.outputs_by_index)))
        if len(dt.outputs_by_index) == 0:
            dst.write(" _fake_RTS_INDEX_to_avoid_emptyness_")
        dst.write("};\n\n")

    ##############################################################
    ## Shared code.
    ##
    ## Currently shared code is tricky, as we don't have an easy
    ## mechanism to isolate handlers, so per-device shared code is
    ## hard. In principle this could be done using clCompile and clLink,
    ## but it adds a lot of complexity. It could also be done using
    ## macros, which probably makes more sense but is more than I
    ## can be bothered with now
    ##
    ## For now we just dump all shared code out into the same translation
    ## unit. Any naming conflicts will have to be dealt with by the app writer.

    dst.write("////////////////////////////\n// Graph shared code\n\n")
    for sc in gt.shared_code:
        dst.write(adapt_handler(sc) + "\n")
    for dt in gt.device_types.values():  # type:DeviceType
        dst.write(
            f"////////////////////////////\n// Device {dt.id} shared code\n\n")
        for sc in dt.shared_code:
            dst.write(adapt_handler(sc) + "\n")

    #################################################################
    ## Then the handlers
    ##
    ## We'll emit per device functions in order to deal with things like
    ## per device scope

    for dt in gt.device_types.values():  # type:DeviceType
        shared_prefix = f"""
        typedef {gt.id}_{dt.id}_properties_t {dt.id}_properties_t;
        typedef {gt.id}_{dt.id}_state_t {dt.id}_state_t;
        typedef {dt.id}_properties_t DEVICE_PROPERTIES_T;
        typedef {dt.id}_state_t DEVICE_STATE_T;
        const GRAPH_PROPERTIES_T *graphProperties=(const GRAPH_PROPERTIES_T*)_gpV;
        const DEVICE_PROPERTIES_T *deviceProperties=(const DEVICE_PROPERTIES_T*)_dpV;
        """
        for (i, op) in enumerate(dt.outputs_by_index):
            shared_prefix += f"const uint RTS_FLAG_{op.name} = 1<<{i};"
            shared_prefix += f"const uint RTS_INDEX_{op.name} = {i};"

        dst.write(f"""
        void calc_rts_{dt.id}(uint _dev_address_, const void *_gpV, const void *_dpV, const void *_dsV, uint *readyToSend)
        {{
            {shared_prefix}
            const DEVICE_STATE_T *deviceState=(const DEVICE_STATE_T*)_dsV;
            //////////////////
            {adapt_handler(dt.ready_to_send_handler)}
        }}
        """)

        dst.write(f"""
        void do_init_{dt.id}(uint _dev_address_, const void *_gpV, const void *_dpV, void *_dsV)
        {{
            {shared_prefix}
            DEVICE_STATE_T *deviceState=(DEVICE_STATE_T*)_dsV;
            //////////////////
            {adapt_handler(dt.init_handler)}
        }}
        """)

        assert dt.on_hardware_idle_handler == None or dt.on_hardware_idle_handler.strip(
        ) == "", "Hardware idle not supported yet"
        assert dt.on_device_idle_handler == None or dt.on_device_idle_handler.strip(
        ) == "", "Device idle not supported yet"

        for ip in dt.inputs_by_index:
            dst.write(f"""
            void do_recv_{dt.id}_{ip.name}(uint _dev_address_, const void *_gpV, const void *_dpV, void *_dsV, const void *_epV, void *_esV, const void *_msgV)
            {{
                {shared_prefix}
                DEVICE_STATE_T *deviceState=(DEVICE_STATE_T*)_dsV;
                typedef {ip.message_type.id}_message_t MESSAGE_T;
                typedef {dt.id}_{ip.name}_properties_t EDGE_PROPERTIES_T;
                typedef {dt.id}_{ip.name}_state_t EDGE_STATE_T;
                const EDGE_PROPERTIES_T *edgeProperties=(const EDGE_PROPERTIES_T*)_epV;
                EDGE_STATE_T *edgeState=(EDGE_STATE_T*)_esV;
                const MESSAGE_T *message=(const MESSAGE_T*)_msgV;
                //////////////////
                {adapt_handler(ip.receive_handler)}
            }}
            """)

        dst.write(f"""
        void do_recv_{dt.id}(uint _dev_address_, uint pin_index, const void *_gpV, const void *_dpV, void *_dsV, const void *_epV, void *_esV, const void *_msgV) {{
        """)
        for (i, ip) in enumerate(dt.inputs_by_index):
            dst.write(
                f"  if(pin_index=={i}){{  do_recv_{dt.id}_{ip.name}(_dev_address_, _gpV, _dpV, _dsV, _epV, _esV, _msgV); }} else"
                + "\n")
        dst.write("  { assert(0); } ")
        dst.write("}\n")

        for op in dt.outputs_by_index:
            dst.write(f"""
            void do_send_{dt.id}_{op.name}(uint _dev_address_, const void *_gpV, const void *_dpV, void *_dsV, int *sendIndex, int *doSend,  void *_msgV)
            {{
                {shared_prefix}
                DEVICE_STATE_T *deviceState=(DEVICE_STATE_T*)_dsV;
                typedef {op.message_type.id}_message_t MESSAGE_T;
                MESSAGE_T *message=(MESSAGE_T*)_msgV;
                //////////////////
                {adapt_handler(op.send_handler)}
            }}
            """)

        dst.write(f"""
        void do_send_{dt.id}(uint device_address, uint pin_index, const void *_gpV, const void *_dpV, void *_dsV, int *sendIndex, int *doSend, void *_msgV) {{
        """)

        for (i, ip) in enumerate(dt.outputs_by_index):
            dst.write(
                f"  if(pin_index=={i}){{  do_send_{dt.id}_{ip.name}(device_address, _gpV, _dpV, _dsV, sendIndex, doSend, _msgV); }} else"
                + "\n")
        dst.write("  { assert(0); }\n")
        dst.write("}\n")

    dst.write(
        "void do_init(uint device_address, uint device_type_index, const void *_gpV, const void *_dpV, void *_dsV){\n"
    )
    for (i, dt) in enumerate(gt.device_types.values()):
        dst.write(
            f"    if(device_type_index=={i}){{ do_init_{dt.id}(device_address, _gpV, _dpV, _dsV); }}"
        )
    dst.write("  { assert(0); }\n")
    dst.write("}\n\n")

    dst.write(
        "void calc_rts(uint device_address, uint device_type_index, const void *_gpV, const void *_dpV, void *_dsV, uint *readyToSend){\n"
    )
    for (i, dt) in enumerate(gt.device_types.values()):
        dst.write(
            f"    if(device_type_index=={i}){{ calc_rts_{dt.id}(device_address, _gpV, _dpV, _dsV, readyToSend); }}"
        )
    dst.write("  { assert(0); }\n")
    dst.write("}\n\n")

    dst.write(
        "void do_send(uint device_address, uint device_type_index, uint pin_index, const void *_gpV, const void *_dpV, void *_dsV, int *sendIndex, int *doSend, void *_msgV){\n"
    )
    for (i, dt) in enumerate(gt.device_types.values()):
        dst.write(
            f"    if(device_type_index=={i}){{ do_send_{dt.id}(device_address, pin_index, _gpV, _dpV, _dsV, sendIndex, doSend, _msgV); }}"
        )
    dst.write("  { assert(0); }\n")
    dst.write("}\n\n")

    dst.write(
        "void do_recv(uint device_address, uint device_type_index, uint pin_index, const void *_gpV, const void *_dpV, void *_dsV, const void *_epV, void *_esV, const void *_msgV){\n"
    )
    for (i, dt) in enumerate(gt.device_types.values()):
        dst.write(
            f"    if(device_type_index=={i}){{ do_recv_{dt.id}(device_address, pin_index, _gpV, _dpV, _dsV, _epV, _esV, _msgV); }}"
        )
    dst.write("  { assert(0); }\n")
    dst.write("}\n\n")
Example #19
    def exec_command(self,
                     command: List[str],
                     project_root_dir: str,
                     cwd: str = None,
                     stdout: io.TextIOBase = None,
                     stderr: io.TextIOBase = None) -> None:
        """
        Executes a command under the toolchain environment.

        @param command:
        @param project_root_dir:
        @param stdout:
        @param stderr:
        """

        # TODO: consider reading the output gradually as described here https://stackoverflow.com/a/923127
        toolchain_command = [
            self.get_wrapper_script_abs_path(project_root_dir)
        ]
        toolchain_command += command

        # due to io.UnsupportedOperation: fileno, use this https://stackoverflow.com/a/15374306

        command_stdout = subprocess.PIPE
        command_stderr = subprocess.PIPE

        if stdout:
            if has_fileno(stdout):
                command_stdout = stdout
        elif has_fileno(sys.stdout):
            command_stdout = sys.stdout
        else:
            stdout = sys.stdout

        if stderr:
            if has_fileno(stderr):
                command_stderr = stderr
        elif has_fileno(sys.stderr):
            command_stderr = sys.stderr
        else:
            stderr = sys.stderr

        subprocess.check_output("echo hi", shell=True, universal_newlines=True)

        # TODO: no timeouts, check a solution https://docs.python.org/3/library/subprocess.html#subprocess.Popen.communicate
        # stdout_data, stderr_data = subprocess.Popen(toolchain_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        #                                             cwd=cwd).communicate()

        # TODO: capture the return codes of failed builds to propagate the error code of the tool
        subprocess_call = subprocess.run(toolchain_command,
                                         stdout=command_stdout,
                                         stderr=command_stderr,
                                         cwd=cwd,
                                         encoding='utf-8')

        if stdout and not has_fileno(stdout):
            stdout.write(subprocess_call.stdout)

        if stderr and not has_fileno(stderr):
            stderr.write(subprocess_call.stderr)

        if subprocess_call.returncode != 0:
            raise ToolChainExecFail(
                'execution of toolchain: {} and command: {} failed'.format(
                    self._name, ' '.join(command)))
Example #20
def translate_function(f: ast.FunctionDef, dst: io.TextIOBase):
    # HACK : Moving to template types, to allow parameter types to float

    assert f
    args = get_positional_arguments(f)

    args_types = ["class T{}".format(i) for i in range(len(args))]

    dst.write("template<{}>\n".format(",".join(args_types)))
    dst.write("void kernel_{}(\n".format(f.name))
    for (index, arg) in enumerate(args):
        #assert arg.annotation!=None, "Expecting type annotation"
        #type=arg.annotation
        #assert isinstance(type,ast.Name), "Expecting type annotation to be a name"
        #datatype=None
        dst.write("  T{} {}".format(index, arg.arg))
        if index + 1 < len(args):
            dst.write(",")
        dst.write("\n")
    dst.write("){\n")

    vars = set()
    for statement in f.body:
        translate_statement(statement, dst, vars, indent="  ")

    dst.write("}\n\n")
Example #21
 def _dump_it(self, f: io.TextIOBase) -> io.TextIOBase:
     for k in header_order[:-2]:
         f.write('# {}: {}\n'.format(k, self[k]))
     if len(self['SPK']) > 0:
         for spk in self['SPK']:
             f.write(f'# SPK: {spk}\n')
     else:
         f.write(f'# SPK:\n')
     if self['XZONE'] is not None:
         f.write('# XZONE: {}\n'.format(self['XZONE']))
     f.write('##\n')
     f.write('# Written by a Python PTF class, suspicious.\n#\n')
     if self.comments:
         c = self.comments.splitlines()
         for line in c:
             f.write('# ' + line + '\n')
         f.write('#\n')
     # f.write('# ' + ','.join(x.title() for x in fieldnames) + '\n')
     f.write('# ' + ','.join(map(str, list(range(1,
                                                 len(fieldnames) + 1)))) +
             '\n')
     # f.write('# ' + ','.join(x.capitalize() for x in fieldnames) + '\n')
     f.write('# ' + ','.join(fieldnames) + '\n')
     writer = csv.DictWriter(f,
                             fieldnames=fieldnames,
                             extrasaction='ignore',
                             restval='')
     if (Counter(fieldnames) == Counter(self.fieldnames)):
         for record in self.ptf_recs:
             writer.writerow(record)
     else:
         t = key_translation(fieldnames, self.fieldnames)
         for record in self.ptf_recs:
             writer.writerow({t[k]: v for k, v in record.items()})
     return f
Example #22
def binary_to_textual(source, writer: io.TextIOBase):
    """Convert a file in binary format to textual format."""
    parsed = binary.parse(source)
    writer.write(f'// Digital Circuit File - version {parsed.version}\n')
    if parsed.features:
        raise NotImplementedError('Process features.')
        # writer.write(f'// Features: NYI')

    def name_generator():
        """Yields the name of the next parameter."""
        for next_index in itertools.count(1):
            index = next_index
            name = ''
            while index > 0:
                index, remainder = divmod(index - 1, 26)
                name = chr(remainder + ord('a')) + name
            yield name

    def format_arguments(arguments, names):
        if len(arguments) > 25:
            raise NotImplementedError('Labelling more than 25 arguments '
                                      'not supported.')

        return ', '.join(f'{arg.lower()} %{name}'
                         for arg, name, in zip(arguments, names))

    if len(parsed.symbols) > 2:
        # For this to work, symbols would need to record which instructions
        # belong to them.
        raise NotImplementedError('Only supports 1 symbol.')

    for symbol in parsed.symbols:
        names = name_generator()

        arguments = format_arguments(symbol.arguments, names)
        # TODO: need to find the return value in the instructions.
        # TODO: Consider putting this in the symbol format or instruction count
        # for the symbol so its O(1) look-up.
        # return_values = format_arguments(symbol.arguments)
        return_values = 'bit sum, bit carry_out'
        writer.write(f'define @{symbol.name}({arguments}) : {return_values}\n')
        writer.write('{\n')

        names = list(itertools.islice(name_generator(), symbol.argument_count))

        for instruction, name in zip(parsed.instructions[symbol.start:],
                                     name_generator()):
            mnemonic = OPCODE_TO_MNEMONIC[instruction.opcode.intvalue]
            operand_a = names[-instruction.operand_a]
            operand_b = names[-instruction.operand_b]

            names.append(name)

            if instruction.opcode.intvalue == Opcode.RETURN:
                writer.write(f'  {mnemonic} %{operand_a} %{operand_b}\n')
                # Only single return per-symbol/function.
                break

            writer.write(f'  %{name} = {mnemonic} %{operand_a} %{operand_b}\n')

        writer.write('}\n')
Example #23
 def output(self, context: CommandContext, out: io.TextIOBase):
     path = context.getAuxiliaryFile("params", ".json")
     with path.open("wt") as fileout:
         context.config.__xpm__.outputjson(fileout, context)
     out.write(context.relpath(path))
Example #24
def write_python_file(writer: io.TextIOBase,
                      hash_items: List[HashSelection],
                      group_items: List[GroupSelection],
                      *,
                      readable: bool = True,
                      sort: bool = True):
    all_names = []
    writer.write('#!/usr/bin/env python3\n')
    writer.write('#-*- coding: utf-8 -*-\n')
    writer.write(
        '"""Known hashes, groups, and callbacks for Majiro  (this file was auto-generated)\n'
    )
    writer.write('\n')
    writer.write('Contains:\n')
    hash_lists = []
    group_lists = []
    for i, item in enumerate(hash_items):
        hash_lists.append(select_hashes(item.hashes, *item.prefixes,
                                        sort=sort))
        writer.write(f' {len(hash_lists[-1]):<3d} {item.name} names\n')
        all_names.extend((item.varname, item.varlookup))
    for i, item in enumerate(group_items):
        group_lists.append(select_groups(item.groups, sort=sort))
        if item.type is int:
            writer.write(f' {len(group_lists[-1]):<3d} {item.name} hashes\n')
        else:
            writer.write(f' {len(group_lists[-1]):<3d} {item.name} names\n')
        all_names.append(item.varname)
    writer.write('"""\n')
    writer.write('\n')
    writer.write(f'__date__    = {__date__!r}\n')
    writer.write(f'__author__  = {__author__!r}\n')
    writer.write('\n')
    writer.write(f'__all__ = {all_names!r}\n')
    writer.write('\n')
    # writer.write('#######################################################################################\n')
    writer.write(f'{PYTHON_HR}\n')
    writer.write('\n')
    writer.write('from typing import Dict, Set\n')

    if hash_items:
        writer.write('\n')
    for i, item in enumerate(hash_items):
        hash_list = hash_lists[i]
        write_python_comment(writer, item.comment)
        writer.write(f'\n{item.varname}:Dict[int,str] = ')
        write_hashes_dict(writer, hash_list, readable=False, python=True)
        writer.write(
            f'\n{item.varlookup}:Dict[str,int] = dict((v,k) for k,v in {item.varname}.items())'
        )
        writer.write('\n')

    if group_items:
        writer.write('\n')
    for i, item in enumerate(group_items):
        group_list = group_lists[i]
        write_python_comment(writer, item.comment)
        writer.write(f'\n{item.varname}:Set[{item.type.__name__}] = ')
        write_groups_list(writer,
                          group_list,
                          item.type is int,
                          readable=False,
                          python=True)
        writer.write('\n')

    # writer.write('\n\n#######################################################################################\n\n')
    writer.write(f'\n\n{PYTHON_HR}\n\n')
    writer.write('del Dict, Set  # cleanup declaration-only imports\n')
Example #25
 def parse_header(self, f: TextIOBase) -> JubeatChart.MetaData:
     title = f.readline().strip()
     artist = f.readline().strip()
     f.readline()
     chart = f.readline().strip()
     f.readline()
     difficulty = float(f.readline().replace("Level:", "").strip())
     bpm = float(f.readline().replace("BPM:", "").strip())
     # note count
     f.readline()
     f.readline()
     return JubeatChart.MetaData(title=title,
                                 artist=artist,
                                 chart=chart,
                                 difficulty=difficulty,
                                 bpm=bpm)
Example #26
def write_hashes_dict(writer: io.TextIOBase,
                      hash_list: List[Tuple[int, str]],
                      *,
                      readable: bool = False,
                      python: bool = False,
                      tab: str = '\t',
                      singlequotes: bool = True,
                      pybraces: Tuple[str, str] = ('{', '}')):
    writer.write(pybraces[0] if python else '{')

    for i, (h, sig) in enumerate(hash_list):
        # comma-separate after first item
        if i: writer.write(',')
        # newline and indent
        if readable: writer.write('\n' + tab)

        if python:  # we don't have to use butt-ugly string hex values
            writer.write(f'0x{h:08x}:')
        else:  # bleh, JSON doesn't support hex OR numeric keys
            writer.write(f'"{h:08x}":')

        # visual space between key and value
        if readable: writer.write(' ')

        if python and singlequotes:  # just use normal-repr single-quotes
            # also a bad hack, because repr does not guarantee one quote or the other
            #  in CPython we trust
            writer.write(repr(sig))  #.fullname))
        else:
            #FIXME: bad hack for double-quotes
            r = repr(sig)[1:-1].replace('\\\'', '\'').replace('\"', '\\\"')
            writer.write(f'"{r}"')  #.fullname)[1:-1]))
        writer.flush()

    # newline before closing brace
    if readable: writer.write('\n')
    writer.write(pybraces[1] if python else '}')
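A sketch of driving write_hashes_dict above with made-up hash/name pairs. readable=True adds the newline-and-tab layout, and python=True switches to hex integer keys with repr'd string values.

import io

pairs = [(0x1a2b3c4d, "spam"), (0x0badf00d, "eggs")]
buf = io.StringIO()
write_hashes_dict(buf, pairs, readable=True, python=True)
print(buf.getvalue())
# Output (tab-indented):
# {
#     0x1a2b3c4d: 'spam',
#     0x0badf00d: 'eggs'
# }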
Example #27
 def write(writer: io.TextIOBase, line=None, noindent=False):
     if line:
         writer.write(('' if noindent else (indentstr * indent)) + line)
Example #28
def main(fd: TextIOBase):
    print(sum([calculate_fuel(int(x.rstrip())) for x in fd.readlines()]))
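A sketch of exercising main with an in-memory stream. calculate_fuel is not shown in the example, so the mass // 3 - 2 rule below is an assumption made purely so the snippet runs end to end.

import io

def calculate_fuel(mass: int) -> int:
    return mass // 3 - 2  # assumed rule; the original helper is not shown above

main(io.StringIO("12\n14\n1969\n"))  # prints 2 + 2 + 654 = 658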
Example #29
def render_graph(devs: List[Dev], dst: io.TextIOBase):
    dst.write('#include "kernel_template.cl"\n')
    dst.write('#include "kernel_type.cl"\n\n')

    #####################################################################
    # Write out all the landing zones
    for d in devs:
        dst.write(f"""
message_payload_t {d.id}__payloads[{len(d.outputs)}];
atomic_uint {d.id}__ref_counts[{len(d.outputs)}] = {{ { ",".join(["ATOMIC_VAR_INIT(0)" for i in range(len(d.outputs))]) } }};     

""")

    ######################################################################
    # Do incoming edges
    for d in devs:
        make_incoming_entry = lambda ei: f"""
        {{
            {ei[0].index}, //uint pin_index;
            {ei[1].owner.id}__payloads + {ei[1].index}, //message_payload_t *payload;
            {ei[1].owner.id}__ref_counts + {ei[1].index}, //uint *ref_count;
            0, //const void *edge_properties;
            0 //void *edge_state;
        }}
        """

        dst.write(f"""
incoming_edge {d.id}__incoming_edges[{len(d.incoming)}] =
{{
    {
        ",".join([ make_incoming_entry(ie) for ie in d.incoming ])
    }
}};
uint {d.id}__incoming_landing_bits[{len(d.incoming)}] = {{ 0 }};
""")

    ####################################################################
    # do outgoing edges and ports
    for d in devs:
        make_outgoing_entry = lambda oe: f"""{{ {oe[0].owner.id}__incoming_landing_bits, {oe[1]} }}\n"""
        for op in d.outputs:
            dst.write(f"""
    outgoing_edge {d.id}_p{op.index}__outgoing_edges[] = {{
        {",".join(make_outgoing_entry(oe) for oe in op.outgoing)}
    }};
    """)

        make_outgoing_port = lambda op: f"""
        {{
            {len(op.outgoing)}, //unsigned num_outgoing_edges;
            {d.id}_p{op.index}__outgoing_edges, //outgoing_edge *outgoing_edges;
            {d.id}__payloads+{op.index}, //message_payload_t *payload;
        }}
        """

        dst.write(f"""
output_pin {d.id}__output_pins[{len(d.outputs)}] =
{{
    {",".join(make_outgoing_port(op) for op in d.outputs)}
}};
        """)

    ##################################################################################
    ## Properties and state
    for d in devs:
        dst.write(
            f"device_properties_t {d.id}__properties={{ {d.inputs[0].degree} }};\n"
        )
        dst.write(f"device_state_t {d.id}__state={{ 0, 0 }};\n")

    #####################################################################################
    ## Device info

    dst.write("__global device_info devices[]={\n")
    for (i, d) in enumerate(devs):
        if i != 0:
            dst.write(",\n")
        dst.write(f"""
    {{
        {d.address}, // address
        {len(d.incoming)}, //uint num_incoming_edges;
        {d.id}__incoming_edges, //incoming_edge *incoming_edges;  // One entry per incoming edge
        {d.id}__incoming_landing_bits, //uint *incoming_landing_bit_mask; // One bit per incoming edge (keep globally mutable separate from local)

        {len(d.outputs)}, //unsigned num_output_pins;
        {d.id}__output_pins, //const output_pin *output_pins;  // One entry per pin
        {d.id}__ref_counts, //uint *output_ref_counts; // One counter per pin (keep globally mutable separate from local )

        0, //unsigned device_type_index;
        &{d.id}__properties, //const void *device_properties;
        &{d.id}__state //void *device_state;
    }}    
""")
    dst.write("};\n")

    dst.write(f"""
    graph_properties_t G_properties={{ {max_t} }};

    __kernel void kinit()
{{
    init(&G_properties, devices);
}}

__kernel void kstep(unsigned count)
{{
    for(unsigned i=0; i<count;i++){{
        step(&G_properties, devices);
    }}
}}

const uint __TOTAL_DEVICES__={len(devs)};
""")
Example #30
    def generate_body_proper(self, scoped: TextIOBase, as_top: bool = False) -> None:
        if 'operator delete' in self.element.name or 'operator new' in self.element.name:
            return
        imports = set([])
        for elem in self.element.params:
            if elem and elem.target_type.scope != self.element.scope and elem.target_type.scope != "::":
                imports.add(elem.scope)
        if self.element.return_type \
                and self.element.return_type.scope != self.element.scope \
                and self.element.return_type.scope != "::":
            imports.add(self.element.return_type.namespace.name)

        from .structs import CXXMethodDecl
        scoped.write(("""
            constexpr cstring name = "%(pyname)s";

            //generated from %(file)s.generate_body_proper
            // FUNCTION %(name)s THROWS %(throws)s
            status_t %(pyllars_scope)s::%(name)s::%(name)s_init(PyObject * const global_mod){
               static const char* const argumentNames[] = {%(argument_names)s nullptr};
               status_t status = 0;
               %(imports)s
               %(func_decl)s
               return status;
            }
            status_t %(pyllars_scope)s::%(name)s::%(name)s_register(pyllars::Initializer*){
                //do nothing, functions have no children
                return 0;
            }

            %(pyllars_scope)s::%(basic_name)s::Initializer_%(basic_name)s
            *%(pyllars_scope)s::%(basic_name)s::Initializer_%(basic_name)s::initializer =
            new %(pyllars_scope)s::%(basic_name)s::Initializer_%(basic_name)s();
            
""" % {
            'file': __file__,
            'basic_name': self.element.name,
            'pyllars_scope': self.element.pyllars_scope,
            'imports': "\n".join(
                ["if(!PyImport_ImportModule(\"pylllars.%s\")){PyErr_Clear();} " % n.replace("::", ".") for n in
                 imports if n]),
            'module_name': self.element.parent.python_cpp_module_name if self.parent else "pyllars_mod",
            'name': self.sanitize(self.element.name),
            'pyname': CXXMethodDecl.METHOD_NAMES.get(self.element.name).replace('addMethod', '') if
            self.element.name in CXXMethodDecl.METHOD_NAMES else self.element.name if self.element.name != "operator=" else "assign_to",
            'parent_name': qualified_name
                (self.element.parent.name if (self.element.parent.name and self.element.parent.name != "::")
                                          else "pyllars"),
            'parent': self.element.scope,
            # 'template_decl': template_decl(self),
            # 'template_args': self.element.template_arguments_string(),
            'argument_names': ','.join(["\"%s\"" % (arg.name if arg.name else "_%s" % (index + 1)) for index, arg in
                                        enumerate(self.element.params)]) + (',' if self.element.params else ''),
            'has_varargs': str(self.element.has_varargs).lower(),
            'throws': "" if self.element.throws is None else "void" if len(self.element.throws) == 0
            else ",".join(self.element.throws),
            'func_decl': self._func_declaration() if not self.element.is_template else "",
            'return_type': self.element.return_type_spec,
            'arguments': (',' if len(self.element.params) > 0 else "") + ', '.join([t.target_type.full_name for
                                                                               t in self.element.params]),
        }).encode('utf-8'))
Example #31
def write_groups_list(writer: io.TextIOBase,
                      group_list: List[str],
                      is_hex: bool,
                      *,
                      readable: bool = False,
                      python: bool = False,
                      tab: str = '\t',
                      singlequotes: bool = True,
                      pybraces: Tuple[str, str] = ('[', ']')):
    writer.write(pybraces[0] if python else '[')

    for i, group in enumerate(group_list):
        # comma-separate after first item
        if i: writer.write(',')
        # newline and indent
        if readable: writer.write('\n' + tab)

        if is_hex:
            if python:
                writer.write(f'0x{group:08x}')
            else:
                writer.write(f'"{group:08x}"')
        elif python and singlequotes:  # just use normal-repr single-quotes
            # also a bad hack, because repr does not guarantee one quote or the other
            #  in CPython we trust
            writer.write(repr(group))  #.fullname))
        else:  # json
            #FIXME: bad hack for double-quotes
            r = repr(group)[1:-1].replace('\\\'', '\'').replace('\"', '\\\"')
            writer.write(f'"{r}"')  #.fullname)[1:-1]))
        writer.flush()

    # newline before closing brace
    if readable: writer.write('\n')
    writer.write(pybraces[1] if python else ']')
Example #32
def write_python_parse_table(out: io.TextIOBase, parse_table: ParseTable) -> None:
    # Disable MyPy type checking for everything in this module.
    out.write("# type: ignore\n\n")

    out.write("from jsparagus import runtime\n")
    if any(isinstance(key, Nt) for key in parse_table.nonterminals):
        out.write(
            "from jsparagus.runtime import (Nt, InitNt, End, ErrorToken, StateTermValue,\n"
            "                               ShiftError, ShiftAccept)\n")
    out.write("\n")

    methods: OrderedSet[FunCall] = OrderedSet()

    def write_action(act: Action, indent: str = "") -> typing.Tuple[str, bool]:
        assert not act.is_inconsistent()
        if isinstance(act, Reduce):
            stack_diff = act.update_stack_with()
            out.write("{}replay = [StateTermValue(0, {}, value, False)]\n"
                      .format(indent, repr(stack_diff.nt)))
            if stack_diff.replay > 0:
                out.write("{}replay = replay + parser.stack[-{}:]\n".format(indent, stack_diff.replay))
            if stack_diff.replay + stack_diff.pop > 0:
                out.write("{}del parser.stack[-{}:]\n".format(indent, stack_diff.replay + stack_diff.pop))
            out.write("{}parser.shift_list(replay, lexer)\n".format(indent))
            return indent, False
        if isinstance(act, Accept):
            out.write("{}raise ShiftAccept()\n".format(indent))
            return indent, False
        if isinstance(act, Lookahead):
            raise ValueError("Unexpected Lookahead action")
        if isinstance(act, CheckNotOnNewLine):
            out.write("{}if not parser.check_not_on_new_line(lexer, {}):\n".format(indent, -act.offset))
            out.write("{}    return\n".format(indent))
            return indent, True
        if isinstance(act, FilterFlag):
            out.write("{}if parser.flags[{}][-1] == {}:\n".format(indent, act.flag, act.value))
            return indent + "    ", True
        if isinstance(act, PushFlag):
            out.write("{}parser.flags[{}].append({})\n".format(indent, act.flag, act.value))
            return indent, True
        if isinstance(act, PopFlag):
            out.write("{}parser.flags[{}].pop()\n".format(indent, act.flag))
            return indent, True
        if isinstance(act, FunCall):
            enclosing_call_offset = act.offset

            def map_with_offset(args: typing.Iterable[OutputExpr]) -> typing.Iterator[str]:
                get_value = "parser.stack[{}].value"
                for a in args:
                    if isinstance(a, int):
                        yield get_value.format(-(a + enclosing_call_offset))
                    elif isinstance(a, str):
                        yield a
                    elif isinstance(a, Some):
                        # `typing.cast` because Some isn't generic, unfortunately.
                        yield next(map_with_offset([typing.cast(OutputExpr, a.inner)]))
                    elif a is None:
                        yield "None"
                    else:
                        raise ValueError(a)

            if act.method == "id":
                assert len(act.args) == 1
                out.write("{}{} = {}\n".format(indent, act.set_to, next(map_with_offset(act.args))))
            else:
                methods.add(act)
                out.write("{}{} = parser.methods.{}({})\n".format(
                    indent, act.set_to, method_name_to_python(act.method),
                    ", ".join(map_with_offset(act.args))
                ))
            return indent, True
        if isinstance(act, Seq):
            for a in act.actions:
                indent, fallthrough = write_action(a, indent)
            return indent, fallthrough
        raise ValueError("Unknown action type")

    # Write the code corresponding to each action that has to be performed.
    for i, state in enumerate(parse_table.states):
        assert i == state.index
        if state.epsilon == []:
            continue
        out.write("def state_{}_actions(parser, lexer):\n".format(i))
        out.write("{}\n".format(parse_table.debug_context(i, "\n", "    # ")))
        out.write("    value = None\n")
        for action, dest in state.edges():
            assert isinstance(action, Action)
            try:
                indent, fallthrough = write_action(action, "    ")
            except Exception:
                print("Error while writing code for {}\n\n".format(state))
                parse_table.debug_info = True
                print(parse_table.debug_context(state.index, "\n", "# "))
                raise
            if fallthrough:
                if parse_table.states[dest].epsilon != []:
                    # This is a transition to an action.
                    out.write("{}state_{}_actions(parser, lexer)\n".format(indent, dest))
                else:
                    # This is a transition to a shift.
                    out.write("{}top = parser.stack.pop()\n".format(indent))
                    out.write("{}top = StateTermValue({}, top.term, top.value, top.new_line)\n"
                              .format(indent, dest))
                    out.write("{}parser.stack.append(top)\n".format(indent))
            out.write("{}return\n".format(indent))
        out.write("\n")

    out.write("actions = [\n")
    for i, state in enumerate(parse_table.states):
        assert i == state.index
        out.write("    # {}.\n{}\n".format(i, parse_table.debug_context(i, "\n", "    # ")))
        if state.epsilon == []:
            row: typing.Dict[typing.Union[Term, ErrorTokenClass], StateId]
            row = {term: dest for term, dest in state.edges()}
            for err, dest in state.errors.items():
                del row[err]
                row[ErrorToken] = dest
            out.write("    " + repr(row) + ",\n")
        else:
            out.write("    state_{}_actions,\n".format(i))
        out.write("\n")
    out.write("]\n\n")

    out.write("error_codes = [\n")

    def repr_code(symb: typing.Optional[ErrorSymbol]) -> str:
        if isinstance(symb, ErrorSymbol):
            return repr(symb.error_code)
        return repr(symb)

    SLICE_LEN = 16
    for i in range(0, len(parse_table.states), SLICE_LEN):
        states_slice = parse_table.states[i:i + SLICE_LEN]
        out.write("    {}\n".format(
            " ".join(repr_code(state.get_error_symbol()) + ","
                     for state in states_slice)))
    out.write("]\n\n")

    out.write("goal_nt_to_init_state = {}\n\n".format(
        repr({nt.name: goal for nt, goal in parse_table.named_goals})
    ))

    if len(parse_table.named_goals) == 1:
        init_nt = parse_table.named_goals[0][0]
        default_goal = '=' + repr(init_nt.name)
    else:
        default_goal = ''

    # Class providing default method implementations when the caller does not supply them.
    out.write("class DefaultMethods:\n")
    for act in methods:
        assert isinstance(act, FunCall)
        args = ", ".join("x{}".format(i) for i in range(len(act.args)))
        name = method_name_to_python(act.method)
        out.write("    def {}(self, {}):\n".format(name, args))
        out.write("        return ({}, {})\n".format(repr(name), args))
    if not methods:
        out.write("    pass\n")
    out.write("\n")

    out.write("class Parser(runtime.Parser):\n")
    out.write("    def __init__(self, goal{}, builder=None):\n".format(default_goal))
    out.write("        if builder is None:\n")
    out.write("            builder = DefaultMethods()\n")
    out.write("        super().__init__(actions, error_codes, goal_nt_to_init_state[goal], builder)\n")
    out.write("\n")
Esempio n. 33
0
def write_json_file(writer: io.TextIOBase,
                    hash_items: List[HashSelection],
                    group_items: List[GroupSelection],
                    *,
                    tab: str = '\t',
                    readable: bool = True,
                    sort: bool = True):
    writer.write('{')
    first_item = True

    for item in hash_items:
        # comma-separate after first item
        if first_item:
            first_item = False
            # newline and indent
            if readable: writer.write('\n' + tab)
        else:
            writer.write(',')
            # double-newline and indent
            if readable: writer.write('\n\n' + tab)

        writer.write(f'"{item.varname}":')

        # visual space between key and value
        if readable: writer.write(' ')

        hash_list = select_hashes(item.hashes, *item.prefixes, sort=sort)
        write_hashes_dict(writer, hash_list, readable=False, python=False)

    if readable and hash_items and group_items:
        writer.write(',\n\n')  # visual separation between hashes and groups
        first_item = True  # reset first_item, since the separating comma was just written

    for item in group_items:
        # comma-separate after first item
        if first_item:
            first_item = False
            # newline and indent
            if readable: writer.write('\n' + tab)
        else:
            writer.write(',')
            # double-newline and indent
            if readable: writer.write('\n\n' + tab)

        writer.write(f'"{item.varname}":')

        # visual space between key and value
        if readable: writer.write(' ')

        group_list = select_groups(item.groups, sort=sort)
        write_groups_list(writer,
                          group_list,
                          item.type is int,
                          readable=False,
                          python=False)

    # newline before closing brace
    if readable: writer.write('\n')
    writer.write('}')
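A minimal smoke test of write_json_file, assuming no HashSelection or GroupSelection items are passed at all; with empty inputs only the braces (plus a newline, because readable defaults to True) are emitted:

import io

# With empty selections the function writes just an empty, readable JSON object.
buf = io.StringIO()
write_json_file(buf, hash_items=[], group_items=[])
print(buf.getvalue())  # -> "{\n}"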
Esempio n. 34
0
def stdout_and_log(message: str, stdout: TextIOBase):
    """Output message to both stdout and configured logger"""
    stdout.write(message)
    logger.info(message)
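A minimal usage sketch for stdout_and_log, assuming it lives in a module that defines the logger it references; the logger setup below is an assumption for illustration, not part of the excerpt:

import logging
import sys

# Hypothetical module-level logger assumed by stdout_and_log above.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

stdout_and_log("processing started\n", sys.stdout)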
Esempio n. 35
0
 def write_commands(self, out: io.TextIOBase):
     out.write("#pragma once\n")
     out.write("#include \"" + os.path.join(
         VULKAN_HEADERS_DIR, "include", "vulkan", "vk_layer.h") + "\"\n\n")
     out.write("typedef struct VkLayerInstanceDispatchTable {\n")
     for extension, commands in self.instance_dispatch_table.items():
         self.write_extension_commands(out, extension, commands,
                                       "  PFN_vk{0} {0};\n")
     out.write("} VkLayerInstanceDispatchTable;\n\n")
     out.write("typedef struct VkLayerDispatchTable {\n")
     for extension, commands in self.device_dispatch_table.items():
         self.write_extension_commands(out, extension, commands,
                                       "  PFN_vk{0} {0};\n")
     out.write("} VkLayerDeviceDispatchTable;\n\n")
     out.write(
         "inline void init_layer_instance_dispatch_table( VkInstance instance, PFN_vkGetInstanceProcAddr gpa, VkLayerInstanceDispatchTable& dt ) {\n"
     )
     for extension, commands in self.instance_dispatch_table.items():
         self.write_extension_commands(
             out, extension, commands,
             "  dt.{0} = (PFN_vk{0}) gpa( instance, \"vk{0}\" );\n")
     out.write("}\n\n")
     out.write(
         "inline void init_layer_device_dispatch_table( VkDevice device, PFN_vkGetDeviceProcAddr gpa, VkLayerDeviceDispatchTable& dt ) {\n"
     )
     for extension, commands in self.device_dispatch_table.items():
         self.write_extension_commands(
             out, extension, commands,
             "  dt.{0} = (PFN_vk{0}) gpa( device, \"vk{0}\" );\n")
     out.write("}\n")
Esempio n. 36
0
def data(text_io: io.TextIOBase, rounds: int) -> str:

    return ''.join([text_io.readline() for r in range(rounds)])
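A self-contained usage example for data(): with an in-memory stream, rounds simply bounds how many lines are consumed (the sample text is invented for illustration):

import io

# Read only the first three lines of the stream.
sample = io.StringIO("alpha\nbeta\ngamma\ndelta\n")
print(data(sample, 3))  # returns "alpha\nbeta\ngamma\n"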
Esempio n. 37
0
 def write(self, depth: int = 0, writer: TextIOBase = sys.stdout):
     sp = self._sp(depth)
     sp2 = self._sp(depth + 1)
     sp3 = self._sp(depth + 2)
     writer.write(f'{sp}label: {self.label_col} => ' +
                  f'{", ".join(self.label_values)}\n')
     writer.write(f'{sp}continuous:\n')
     for c in self.continuous:
         writer.write(f'{sp2}{c}\n')
     writer.write(f'{sp}discrete:\n')
     for name, labels in self.descrete.items():
         writer.write(f'{sp2}{name}:\n')
         for label in labels:
             writer.write(f'{sp3}{label}\n')
Esempio n. 38
0
 def __init__(self, level, tag):
     TextIOBase.__init__(self)
     self.level = level
     self.tag = tag
Esempio n. 39
0
    def __write_output(self, out: TextIOBase, bands: Bands):
        descriptor = self.__region.descriptor()
        if type(descriptor) is dict:
            file_name = descriptor[Band.RED].file_name()
            band_name = ",".join([
                descriptor[Band.RED].band_name(),
                descriptor[Band.GREEN].band_name(),
                descriptor[Band.BLUE].band_name()
            ])
            wavelength = ",".join([
                descriptor[Band.RED].wavelength_label(),
                descriptor[Band.GREEN].wavelength_label(),
                descriptor[Band.BLUE].wavelength_label()
            ])
        else:
            file_name = descriptor.file_name()
            band_name = descriptor.band_name()
            wavelength = descriptor.wavelength_label()

        out.write("# name:{0}\n".format(self.__region.display_name()))
        out.write("# file name:{0}\n".format(file_name))
        out.write("# band name:{0}\n".format(band_name))
        out.write("# wavelength:{0}\n".format(wavelength))
        out.write("# image width:{0}\n".format(self.__region.image_width()))
        out.write("# image height:{0}\n".format(self.__region.image_height()))
        if self.__projection is not None:
            out.write("# projection:{0}\n".format(self.__projection))
        out.write("# description:{0}\n".format(self.__region.description()))
        out.write("# data:\n")

        out.write(self.__get_data_header(bands))
        band_index: int = 0
        for r in self.__region:
            if self.__has_map_info:
                out.write(
                    self.__output_format.format(r.x_point() + 1,
                                                r.y_point() + 1,
                                                r.x_coordinate(),
                                                r.y_coordinate()))
            else:
                out.write(
                    self.__output_format.format(r.x_point() + 1,
                                                r.y_point() + 1))

            if bands is not None:
                out.write(
                    "," +
                    ",".join([str(item)
                              for item in bands.bands(band_index)]) + "\n")
                band_index += 1
            else:
                out.write("\n")
Esempio n. 40
0
 def _from_stream(stream: TextIOBase) -> str:
     result = stream.read()
     return result
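The method above is a thin wrapper around stream.read(); a self-contained sketch of the equivalent behaviour with an in-memory stream (the enclosing class and any @staticmethod decorator are not shown in the excerpt):

import io

# read() with no arguments drains the whole stream in one call.
stream = io.StringIO("hello\nworld\n")
assert stream.read() == "hello\nworld\n"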