def model():
    """Parse a [Model] section; return a single-entry dict keyed by model name."""
    model_name = yield name
    if DBG:
        print(" ", model_name)
    entries = yield many1(node(Model_keywords, IBIS_keywords, debug=DBG))
    return {model_name: Model(dict(entries))}
def node():
    """Parse one parenthesized AMI node: (label value ...) -> (label, values)."""
    yield lparen
    tag = yield node_name
    body = yield many1(expr)
    yield rparen
    return (tag, body)
def InfixOpDecl():
    """Parse an infix operator declaration and build its AST.FUNDECL node.

    Grammar: (infixl|infixr) INT op (a, b) [typesig] { vardecls stmts }
    """
    fixity_tok = yield (ps.token(TOKEN.INFIXL) ^ ps.token(TOKEN.INFIXR))
    if fixity_tok.typ is TOKEN.INFIXL:
        kind = FunKind.INFIXL
    elif fixity_tok.typ is TOKEN.INFIXR:
        kind = FunKind.INFIXR
    else:
        # Unreachable: the alternative above only yields INFIXL or INFIXR.
        raise Exception("Should never happen")
    fixity = yield ps.token(TOKEN.INT)
    op = yield ps.token(TOKEN.OP_IDENTIFIER)
    yield ps.token(TOKEN.PAR_OPEN)
    left = yield ps.token(TOKEN.IDENTIFIER)
    yield ps.token(TOKEN.COMMA)
    right = yield ps.token(TOKEN.IDENTIFIER)
    yield ps.token(TOKEN.PAR_CLOSE)
    # times(..., 0, 1) yields a 0/1-element list; unwrap to the signature or None.
    sigs = yield ps.times(InfFunTypeSig, 0, 1)
    sig = sigs[0] if sigs else None
    yield ps.token(TOKEN.CURL_OPEN)
    local_decls = yield ps.many(VarDecl)
    body = yield ps.many1(Stmt)
    yield ps.token(TOKEN.CURL_CLOSE)
    return AST.FUNDECL(
        kind=kind,
        fixity=fixity,
        id=op,
        params=[left, right],
        type=sig,
        vardecls=local_decls,
        stmts=body,
    )
def matrix_parser():
    """Parse a matrix: a "height, width" header line followed by `height`
    newline-separated rows of exactly `width` comma-separated integers.

    Returns the matrix as a list of rows (each a list of ints).
    """
    cell = many1(digit()).parsecmap(''.join).parsecmap(int)
    height = yield cell
    yield (string(",") << spaces())
    width = yield cell
    yield string('\n')
    row = separated(cell, string(",") << spaces(), mint=width, maxt=width)
    # BUG FIX: the original did `return rows` on the un-run parser object,
    # so the matrix body was never consumed and the "result" was a Parser.
    # Yield it so the parsed rows become the generator's result.
    rows = yield separated(row, string('\n'), mint=height, maxt=height)
    return rows
def pins():
    """Parse [Component].[Pin]; POWER/GND/NC pins are dropped from the result."""

    def keep(entry):
        # entry is (pin, (model_name, ...)); reject supply/no-connect models.
        (_, (model_name, _)) = entry
        return model_name.upper() not in ("POWER", "GND", "NC")

    yield (lexeme(string("signal_name")) << lexeme(string("model_name")))
    rlcs = yield optional(count(rlc, 3), [])
    parsed = yield many1(pin(rlcs))
    return dict(entry for entry in parsed if keep(entry))
def PrefixOpDecl():
    """Parse a prefix operator declaration into an AST.FUNDECL node.

    Grammar: prefix op (var) [typesig] { vardecls stmts }
    """
    yield ps.token(TOKEN.PREFIX)
    op = yield ps.token(TOKEN.OP_IDENTIFIER)
    yield ps.token(TOKEN.PAR_OPEN)
    param = yield ps.token(TOKEN.IDENTIFIER)
    yield ps.token(TOKEN.PAR_CLOSE)
    # times(..., 0, 1) yields a 0/1-element list; unwrap to the signature or None.
    sigs = yield ps.times(PreFunTypeSig, 0, 1)
    sig = sigs[0] if sigs else None
    yield ps.token(TOKEN.CURL_OPEN)
    local_decls = yield ps.many(VarDecl)
    body = yield ps.many1(Stmt)
    yield ps.token(TOKEN.CURL_CLOSE)
    return AST.FUNDECL(
        kind=FunKind.PREFIX,
        fixity=None,
        id=op,
        params=[param],
        type=sig,
        vardecls=local_decls,
        stmts=body,
    )
def FunDecl():
    """Parse a plain function declaration into an AST.FUNDECL node.

    Grammar: name (args?) [typesig] { vardecls stmts }
    """
    fun_name = yield ps.token(TOKEN.IDENTIFIER)
    yield ps.token(TOKEN.PAR_OPEN)
    params = yield ps.times(FArgs, 0, 1)
    # Unwrap the 0/1-element list; the empty list itself means "no params".
    params = params[0] if params else params
    yield ps.token(TOKEN.PAR_CLOSE)
    sigs = yield ps.times(FunTypeSig, 0, 1)
    sig = sigs[0] if sigs else None
    yield ps.token(TOKEN.CURL_OPEN)
    local_decls = yield ps.many(VarDecl)
    body = yield ps.many1(Stmt)
    yield ps.token(TOKEN.CURL_CLOSE)
    return AST.FUNDECL(
        kind=FunKind.FUNC,
        fixity=None,
        id=fun_name,
        params=params,
        type=sig,
        vardecls=local_decls,
        stmts=body,
    )
def _parse_fasta(self, filehandle, sep="|"):
    """Parse a fasta file.

    Each entry's header is split into fields on `sep`, and the (whitespace-
    joined) sequence is appended as a final field. Returns a list of rows.
    """
    header = parsec.string(">") >> parsec.regex("[^\n\r]*") << parsec.spaces()
    sequence = (
        parsec.sepBy1(
            parsec.regex("[^>\n\r]*"), sep=parsec.regex("[\r\n\t ]+")
        ).parsecmap(concat)
        << parsec.spaces()
    )
    fasta = parsec.many1(header + sequence)
    log(f"Reading {file_str(filehandle)} as a fasta file:")
    try:
        records = fasta.parse(filehandle.read())
    except AttributeError:
        # in case I want to pass in a list of strings, e.g., in tests
        records = fasta.parse(filehandle)
    return [hdr.split(sep) + [seq] for (hdr, seq) in records]
def word(self):
    """Return a parser for a run of one or more letters, joined into a str."""
    letters = psc.many1(psc.letter())
    return letters.parsecmap(''.join)
def end():
    "Parse [End]."
    yield keyword("End")
    # NOTE(review): this returns the `eof` parser object itself rather than
    # running it (no `yield`) — presumably the result is unused; confirm.
    return eof


# [Model]
@generate("[Ramp]")
def ramp():
    "Parse [Ramp]."
    lines = yield count(ramp_line, 2)
    return dict(lines)


# Sub-parsers for the keywords permitted inside a [Model] section.
Model_keywords = {
    "pulldown": many1(vi_line),
    "pullup": many1(vi_line),
    "ramp": ramp,
    "algorithmic_model": many1(ex_line) << keyword('end_algorithmic_model'),
    "voltage_range": typminmax,
    "temperature_range": typminmax,
    "ground_clamp": many1(vi_line),
    "power_clamp": many1(vi_line),
}


@generate("[Model]")
def model():
    "Parse [Model]."
    # BUG FIX: this definition was truncated mid-statement in the original
    # text; completed from the identical, complete model() defined earlier
    # in this file.
    nm = yield name
    if DBG:
        print(" ", nm)
    res = yield many1(node(Model_keywords, IBIS_keywords, debug=DBG))
    return {nm: Model(dict(res))}
def row():
    """Return a parser for one row: a timestamp then space-separated elements."""
    time_col = Parser.tokenize(Parser.time_parser())
    cells = psc.sepBy(Parser.row_element(), psc.many1(psc.string(' ')))
    return time_col + cells
def tokenize(p):
    """Wrap parser `p` so it also consumes the run of trailing spaces."""
    trailing = psc.many1(psc.string(' '))
    return p << trailing
def num_i():
    """Parse one or more digits and return their value as an int."""
    digits = yield P.many1(P.digit())
    return int(''.join(digits))
def __init__(self, file_name):
    """Read the SDF file at `file_name` and parse it into self.molecules.

    Raises OSError if the file cannot be read, UnicodeDecodeError if it is
    not valid UTF-8, and whatever parse errors P.many1(...).parse raises.
    """
    # BUG FIX: use a context manager — the original never closed the handle
    # returned by open(), leaking the file descriptor.
    with open(file_name, 'rb') as handle:
        content = handle.read().decode('utf-8')
    self.molecules = P.many1(sdf_molecule).parse(content)
def fn():
    "many1(p) >> filter(True)"
    parsed = yield many1(p)
    # Drop falsy entries (None, empty results) from the parsed list.
    return [item for item in parsed if item]
def parse_list():
    """Parse a list as a ()-enclosed sequence of expressions."""
    yield parsec.string('(')
    separator = parsec.many1(parsec.space())
    elements = yield parsec.sepBy(parse_expr, separator)
    yield parsec.string(')')
    return wtypes.List(elements)
def modsel():
    "Parse [Model Selector]."
    selector_name = yield name
    entries = yield many1(name + rest_line)
    return {selector_name: entries}
def comp():
    "Parse [Component]."
    component_name = yield lexeme(name)
    entries = yield many1(node(Component_keywords, IBIS_keywords, debug=DBG))
    return {component_name: Component(dict(entries))}
def package():
    "Parse package RLC values."
    # Collect (name, value) parameter pairs and expose them as a dict.
    rlcs = yield many1(param)
    if DBG:
        print(f"rlcs: {rlcs}")
    return dict(rlcs)
def _number() -> parsec.Parser:
    """Return a parser for a digit run, mapped through _evaluate_number."""
    digits = parsec.many1(parsec.digit())
    return digits.parsecmap(_evaluate_number)
def parse_int():
    """Parse an integer as a sequence of digits."""
    digit_chars = yield parsec.many1(parsec.digit())
    return wtypes.Integer(int(''.join(digit_chars)))