Example #1
    def tryParse(self):
	self.isConstructor = False
	self.access = ''
	publicToken = self.tokenizer.check('public')
	if publicToken:
	    self.access = 'public'
	else:
	    publicToken = self.tokenizer.check('private')
	    if publicToken:
		self.access = 'private'
	if not self.tokenizer.check('alias'):
	    if publicToken:
		self.tokenizer.push(publicToken)
	    return False
	self.isClass = False
	self.isFinal = True # all pure aliases are final
	self.isAlias = True # duh
	self.selector = tryParse(SelectorLiteral, self.tokenizer)
	if not self.selector:
	    raise SyntaxError('Expected selector; found: ' + \
		self.tokenizer.nextToken().errForm())
	self.aliasTarget = tryParse(SelectorLiteral, self.tokenizer)
	if not self.aliasTarget:
	    raise SyntaxError('Expected selector; found: ' + \
		self.tokenizer.nextToken().errForm())
	self.tokenizer.require(';')
	return True
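All of these tryParse methods lean on the same backtracking contract from the tokenizer: check consumes and returns the next token only when it matches, push hands a speculatively consumed token back to the stream, and require raises SyntaxError when the expected token is missing. The real implementation is MultiFileTokenizer in Compass.tokenizer; the stand-in below is only a hypothetical sketch of that contract (the token objects, their str attribute, and the whitespace handling are simplified assumptions).

class SketchTokenizer(object):
    # Hypothetical stand-in for Compass.tokenizer.MultiFileTokenizer; it models
    # only the check/push/require contract the parsers above depend on.
    def __init__(self, tokens):
        self.tokens = list(tokens)  # pending tokens, front of the stream first

    def nextToken(self):
        return self.tokens.pop(0) if self.tokens else None

    def check(self, text):
        # Consume and return the next token only if its text matches.
        token = self.nextToken()
        if token is not None and token.str == text:
            return token
        if token is not None:
            self.push(token)  # no match: leave the stream as it was
        return None

    def push(self, token):
        # Return a speculatively consumed token to the front of the stream.
        self.tokens.insert(0, token)

    def require(self, text):
        # Like check, but a missing token is a hard syntax error.
        token = self.check(text)
        if not token:
            raise SyntaxError("expected '" + text + "'")
        return token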
Example #2
    def tryParse(self):
	self.access = ''
	publicToken = self.tokenizer.check('public')
	if publicToken:
	    self.access = 'public'
	else:
	    publicToken = self.tokenizer.check('private')
	    if publicToken:
		self.access = 'private'
	dollarSign = self.tokenizer.check('$')
	if not dollarSign:
	    if publicToken:
		self.tokenizer.push(publicToken)
	    return False
	classToken = self.tokenizer.nextToken()
	self.cls = classToken.str
	self.isClass = self.tokenizer.check('class')
	self.isMixin = False # TODO mixin methods
	mixinToken = self.tokenizer.check('mixin')
	self.expr = None
	if mixinToken:
	    self.tokenizer.push(mixinToken)
	    self.tokenizer.push(classToken)
	    self.tokenizer.push(dollarSign)
	    if publicToken:
		self.tokenizer.push(publicToken)
	    return False
	# TODO constructor code generation
	self.isConstructor = self.tokenizer.check('constructor')
	self.isAbstract = self.tokenizer.check('abstract')
	self.isUndefine = self.tokenizer.check('undefine')
	if not self.isAbstract:
	    self.isAbstract = self.tokenizer.check('optional')
	self.isFinal = self.tokenizer.check('final')
	self.isAlias = self.tokenizer.check('alias')
	self.special = False
	self.selector = tryParse(SelectorLiteral, self.tokenizer)
	if self.selector:
	    if self.isAlias:
		self.aliasTarget = tryParse(SelectorLiteral, self.tokenizer)
		if not self.aliasTarget:
		    raise SyntaxError('Expected selector; found: ' + \
			    self.tokenizer.nextToken().errForm())
		self.tokenizer.require(';')
		return True
	    if self.isAbstract:
		self.tokenizer.require(';')
		return True
	    else:
		self.parseArgsWithSelector()
	else:
	    self.parseInterwoven()
	if self.special:
	    return True
	else:
	    self.parseSecureLevelDecl()
	    return self.parseMethodCode()
Example #3
    def tryParse(self):
	self.mixins = []
	self.methodTable = None
	self.classMethodTable = None
	self.mixinMethodTable = None
	publicToken = self.tokenizer.check('public')
	if not publicToken:
	    publicToken = self.tokenizer.check('private')
	mixinToken = self.tokenizer.check('mixin')
	if not mixinToken:
	    if publicToken:
		self.tokenizer.push(publicToken)
	    return False
	sigilToken = self.tokenizer.check('$')
	if not sigilToken:
	    raise SyntaxError('expected sigil: '+\
		    self.tokenizer.nextToken().errForm())
	idToken = self.tokenizer.nextToken()
	if idToken.type() != TokenTypes.identifier:
	    raise SyntaxError('expected identifier: '+idToken.errForm())
	self.name = idToken.str
	self.selector = tryParse(SelectorLiteral,self.tokenizer)
	self.tokenizer.require(';')
	if self.selector:
	    self.methods = [(self.selector,generateTrueMethod(self.selector))]
	else:
	    self.methods = []
	return True
Example #4
def parseExpressionSimple(tokenizer):
    from returns import ReturnExpression
    from assignment import parseAssignmentExpression
    res = tryParse(ReturnExpression,tokenizer)
    if not res:
	res = parseAssignmentExpression(tokenizer)
    return res
Example #5
    def tryParse(self):
	self.parent = None
	self.exceptionHandlingBlocks = []
	self.exception = False
	self.mixins = []
	publicToken = self.tokenizer.check('public')
	privateToken = self.tokenizer.check('private')
	classToken = self.tokenizer.check('class')
	if not classToken:
	    classToken = self.tokenizer.check('exception')
	    if classToken:
		self.exception = True
	if classToken:
	    self.singleton = False
	else:
	    singletonToken = self.tokenizer.check('singleton')
	    if not singletonToken:
		if privateToken:
		    self.tokenizer.push(privateToken)
		if publicToken:
		    self.tokenizer.push(publicToken)
		return False
	    else:
		self.singleton = True
	sigilToken = self.tokenizer.check('$')
	if not sigilToken:
	    raise SyntaxError('expected sigil ($) in class declaration')
	idToken = self.tokenizer.nextToken()
	if idToken.type() != TokenTypes.identifier:
	    raise SyntaxError('expected identifier after sigil: '+idToken.errForm())
	self.name = idToken.str
	self.checkNameUniqueness()
	if self.tokenizer.check('('):
	    if not self.tokenizer.check('$'):
		raise SyntaxError('no sigil in class decl')
	    self.parent = self.tokenizer.nextToken().str
	    if self.parent == self.name and self.name != 'Object':
		raise SemanticError('Only $Object may inherit from itself.')
	    if not self.tokenizer.check(')'):
		raise SyntaxError('no close paren in class decl')
	else:
	    if self.exception:
		self.parent = 'Exception'
	    else:
		self.parent = 'Object'
	self.ivars = []
	if self.exception:
	    self.selector = tryParse(SelectorLiteral,self.tokenizer)
	while not self.tokenizer.check(';'):
	    if not self.tokenizer.check('!'):
		raise SyntaxError("expected ';' or '!' in class declaration")
	    self.ivars += [self.tokenizer.nextToken().str]
	    if not self.tokenizer.check(','):
		self.tokenizer.require(';')
		break
	self.methodTable = None
	return True
Example #6
    def tryParse(self):
	if not tryParse(CompassDecl,self.tokenizer):
	    return False
	self.classDecl = []
	self.methodDecl = []
	self.aliasDecl = []
	self.mixinDecl = []
	self.globalDecl = []
	self.enumDecl = []
	while True:
	    newClass = tryParse(ClassDecl,self.tokenizer)
	    newMixin = tryParse(MixinDecl,self.tokenizer)
	    newMixinReference = tryParse(MixinReference,self.tokenizer)
	    newMethod = tryParse(MethodDecl,self.tokenizer)
	    newMessage = tryParse(MessageDecl,self.tokenizer)
	    newAlias = tryParse(AliasDecl,self.tokenizer)
	    compassDecl = tryParse(CompassDecl,self.tokenizer)
	    newGlobal = tryParse(GlobalDecl,self.tokenizer)
	    newEnum = tryParse(EnumDecl,self.tokenizer)
	    if not newClass and not newMethod and not compassDecl\
		    and not newMixin and not newMixinReference\
		    and not newAlias and not newMessage and not newGlobal\
		    and not newEnum:
		break
	    # Note: messages are completely ignored so we do not keep track
	    #       of them
	    if newMethod:
		self.methodDecl += [newMethod]
	    if newAlias:
		self.aliasDecl += [newAlias]
	    if newMixin:
		self.mixinDecl += [newMixin]
	    if newMixinReference:
		self.methodDecl += [newMixinReference]
	    if newClass:
		self.classDecl += [newClass]
	    if newGlobal:
		self.globalDecl += [newGlobal]
	    if newEnum:
		self.enumDecl += [newEnum]
	return True
Example #7
    def tryParse(self):
	sigil = self.tokenizer.check(self.sigil())
	if not sigil:
	    return False
	if self.acceptsPackages():
	    self.package = tryParse(PackageSpec,self.tokenizer)
	    id = self.tokenizer.nextToken()
	else:
	    id = self.tokenizer.checkNoWhitespace()
	if not self.checkNameToken(id):
	    if not id:
		raise SyntaxError("nothing after sigil: " + sigil.errForm())
	    raise SyntaxError("non-id after sigil: "+id.errForm())
	self.name = id.str
	return True
Example #8
    def tryParse(self):
	token = self.tokenizer.checkRegex(compile('^#')) # compile is re.compile here (module-level import assumed)
	if not token:
	    return False
	if token.str != '#':
	    self.messageName = token.str[1:]
	    self.package = '__operator'
	    self.operator = True
	    return True
	self.operator = False
	self.package = tryParse(PackageSpec,self.tokenizer)

	id = self.tokenizer.nextToken()
	# id holds each token that may extend the selector literal
	if id is None or id.type() != TokenTypes.identifier:
	    # without a package, it might still be a symbol
	    if self.package is None:
		self.tokenizer.push(id)
		self.tokenizer.push(token)
		return False
	    else: #otherwise, can't be much of anything
		raise SyntaxError("Expect id after pkg "+
			str(self.package)+" token "+id.errForm())
	# we are guaranteed success from here on out
	self.messageName = id.str
	# is it a keyword-message?
	if not self.tokenizer.checkNoWhitespace(':'):
	    # no, we're finished
	    self.argCount = 0
	    return True
	# yes, keyword message
	# TODO: ugly code! ick! does it even work?
	self.messageName += ':'
	self.argCount = 1
	id = self.tokenizer.nextToken()
	if (not id) or id.type() != TokenTypes.identifier:
	    self.tokenizer.push(id)
	    return True
	while self.tokenizer.checkNoWhitespace(':'):
	    self.messageName += id.str + ':'
	    self.argCount += 1
	    id = self.tokenizer.checkNoWhitespace()
	    if (not id) or id.type() != TokenTypes.identifier:
		break
	if id:
	    self.tokenizer.push(id)
	return True
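As a worked illustration of the branches above, here is what a few selector literals decompose into, assuming the tokenizer splits '#', identifiers, and ':' into separate tokens the way this method expects, and lexes an operator selector such as '#+' as a single token (both are assumptions about the tokenizer, not something shown here):

# Illustrative traces only; the tokenization described above is assumed.
#   '#run'      ->  operator = False, messageName = 'run',      argCount = 0
#   '#at:put:'  ->  operator = False, messageName = 'at:put:',  argCount = 2
#   '#+'        ->  operator = True,  messageName = '+',        package = '__operator'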
Example #9
    def tryParse(self):
	self.access = ''
	publicToken = self.tokenizer.check('public')
	if publicToken:
	    self.access = 'public'
	else:
	    publicToken = self.tokenizer.check('private')
	    if publicToken:
		self.access = 'private'
	self.selector = tryParse(SelectorLiteral, self.tokenizer)
	if not self.selector:
	    if publicToken:
		self.tokenizer.push(publicToken)
	    return False
	self.tokenizer.require(';')
	return True
Example #10
def parseKeywordMessageSend(tokenizer):
    rcvr = parseExpressionLevel(10000,tokenizer)
    superToken = None
    if not rcvr:
	superToken = tokenizer.nextToken()
	if superToken and superToken.str == 'super':
	    rcvr = SuperReference()
	else:
	    if superToken:
		tokenizer.push(superToken)
	    superToken = None
    package = tryParse(PackageSpec,tokenizer)
    keywordId = tokenizer.nextToken()
    colon = tokenizer.checkNoWhitespace(':')
    if not keywordId or keywordId.type() != TokenTypes.identifier or not colon:
	if colon:
	    tokenizer.push(colon)
	if package:
	    package.pushBack(tokenizer)
	if keywordId:
	    tokenizer.push(keywordId)
	if superToken:
	    tokenizer.push(superToken)
	return rcvr
    # committed
    methodName = ''
    args = []
    while keywordId and keywordId.type() == TokenTypes.identifier and colon:
	methodName += keywordId.str + ':'
	args += [parseExpressionLevel(10000,tokenizer)]
	keywordId = tokenizer.nextToken()
	colon = tokenizer.checkNoWhitespace(':')
    if keywordId:
	tokenizer.push(keywordId)
    if rcvr is None:
	rcvr = SelfReference()
    return KeywordMessageSend(rcvr,package,methodName,args)
Example #11
from __future__ import with_statement
from sys import argv
from Compass.tokenizer import MultiFileTokenizer
from Compass.SingleFile.file import CompassFile
from Compass.parser import tryParse

if __name__ == '__main__':
    mainClass = argv[1]
    tokenizer = MultiFileTokenizer(argv[2:])
    res = tryParse(CompassFile,tokenizer)
    t = tokenizer.nextToken()
    if t:
	print 'Error: tokens left over:',t.errForm()
	exit(1)
    print "#initialize"
    print "Global $BasicConsoleInputChannel"
    print "MCall 0 'new'"
    print "Global $BasicConsoleOutputChannel"
    print "MCall 0 'new'"
    print "Int 2"
    print "Global $BasicConsoleOutputChannel"
    print "MCall 1 'newWithFileDescriptor:'"
    print "Global $"+mainClass
    print "MCall 3 'newWithInput:andOutput:andError:'"
    print "MCall 0 'run'"
    print "Return"

    print "#true"
    print "Global $true"
    print "Return"
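The driver above, like every earlier example, goes through Compass.parser.tryParse, which is not shown. Judging by how the node classes behave (the constructor receives the tokenizer, tryParse() returns True or False, and a failed parse pushes back whatever it consumed), a minimal sketch of that helper might look like the hypothetical reconstruction below; the real function may well differ.

def tryParse(nodeClass, tokenizer):
    # Hypothetical reconstruction of Compass.parser.tryParse: build the node,
    # let it try to parse itself, and hand it back only on success.  Assumes
    # nodeClass(tokenizer) stores the tokenizer as self.tokenizer, which is how
    # every tryParse method above accesses it.
    node = nodeClass(tokenizer)
    if node.tryParse():
        return node
    return None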
Example #12
def parsePostfixExpression(tokenizer):
    if tokenizer.check('['):
	expr = parseExpression(tokenizer)
	if expr:
	    if tokenizer.check(','):
		items = [expr]
		items += parseExpressionList(tokenizer)
		if not tokenizer.check(']'):
		    raise SyntaxError('close your brackets: '+\
			    tokenizer.nextToken().errForm())
		expr = List(items)
	    else:
		expr = parseRestOfSliceCall(tokenizer,
			SelfReference(),
			expr)
	else:
	    if tokenizer.check(']'):
		expr = List([])
	    else:
		raise SyntaxError('close your brackets: '+
			tokenizer.nextToken().errForm())
    elif tokenizer.check('('):
	expr = parseExpression(tokenizer)
	if expr:
	    if tokenizer.check(','):
		items = [expr]
		items += parseExpressionList(tokenizer)
		if not tokenizer.check(')'):
		    raise SyntaxError('close your parentheses: '+\
			    tokenizer.nextToken().errForm())
		expr = CallExpression(SelfReference(),items)
	    else:
		if not tokenizer.check(')'):
		    raise SyntaxError("expected ')': "+\
			    tokenizer.nextToken().errForm())
	    # expr already holds the parenthesized expression
	else:
	    if tokenizer.check(')'):
		expr = CallExpression(SelfReference(),[])
	    else:
		raise SyntaxError("expected expression or ')' after '(': "+tokenizer.nextToken().errForm())
    else:
	expr = parsePrimaryExpression(tokenizer)
    implicitSelf = None
    if not expr:
	if tokenizer.check('super'):
	    expr = SuperReference()
	else:
	    expr = SelfReference()
	    implicitSelf = expr
    # expr is the only variable we care about, and it is guaranteed to have
    # something in it

    # so now scoop up any postfixes
    oldExpr = None
    while oldExpr != expr:
	# expr refers to the expression so far
	oldExpr = expr
	if tokenizer.check('['):
	    index = parseExpression(tokenizer)
	    expr = parseRestOfSliceCall(tokenizer,expr,index)
	if tokenizer.check('('):
	    expressionList = parseExpressionList(tokenizer)
	    if not tokenizer.check(')'):
		raise SyntaxError('needed closing ): '+
			tokenizer.nextToken().errForm())
	    expr = CallExpression(expr,expressionList)
	if tokenizer.check('{'):
	    block = parseExpression(tokenizer)
	    expr = GivenExpression(expr,block)
	    if not tokenizer.check('}'):
		raise SyntaxError("expecting '}': "+\
			tokenizer.nextToken().errForm())
	package = tryParse(PackageSpec,tokenizer)
	unaryMsg = tokenizer.nextToken()
	if unaryMsg:
	    colon = tokenizer.checkNoWhitespace(':')
	    if not colon and unaryMsg.type() == TokenTypes.identifier:
		expr = UnaryMessageSend(expr,package,unaryMsg.str)
	    else:
		if colon:
		    tokenizer.push(colon)
		tokenizer.push(unaryMsg)
		if package:
		    package.pushBack(tokenizer)
	else:
	    if package:
		package.pushBack(tokenizer)
    if expr is implicitSelf:
	return None
    return expr
Example #13
def parseAssignmentExpression(tokenizer):
    res = tryParse(Assignment,tokenizer)
    if not res:
	res = parseReplacementExpression(tokenizer)
    return res
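Example #4 and Example #13 share the same fallback shape: try one parser, and only fall through to the next when the first produces nothing. A helper capturing that pattern could look like the sketch below; it does not exist in the code above, and the name parseFirstOf is invented purely for illustration.

def parseFirstOf(tokenizer, *parsers):
    # Illustrative only: run each parser in turn and return the first result
    # that is not None, assuming every parser restores the token stream when
    # it fails, as the tryParse methods above do.
    for parser in parsers:
        res = parser(tokenizer)
        if res:
            return res
    return None

Under that assumption, Example #13 is roughly parseFirstOf(tokenizer, lambda t: tryParse(Assignment, t), parseReplacementExpression).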