Example #1
def evaluate_interactive(self, env=None):
    ## use a persistent environment for the interactive interpreter
    if not env:
        env = Environment(self.call_stack, self.function_map,
                          self.builtin_map, self.debug)
        env.call_function("__toplevel__")  ## some top-level context
    ## call this from the interactive interpreter
    rval = self.ast.evaluate(env)
    return [rval, env]
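For context, a minimal sketch of how evaluate_interactive might back a read-eval-print loop follows. The Interpreter class, its constructor, and its parse method are assumed names introduced only for illustration; the only detail taken from the method above is the [result, env] return shape and the fact that the same env is passed back in on each call.

## hypothetical REPL driver around evaluate_interactive (sketch);
## Interpreter() and interpreter.parse() are assumed, not from the source
interpreter = Interpreter()
env = None
while True:
    try:
        source = input(">> ")
    except EOFError:
        break
    interpreter.parse(source)  ## assumed to rebuild interpreter.ast
    rval, env = interpreter.evaluate_interactive(env)  ## reuse the same env across calls
    if rval is not None:
        print(rval)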
Example #2
def evaluate(self):
    env = Environment(self.call_stack, self.function_map,
                      self.builtin_map, self.debug, int(self.MAX_REC_DEPTH / 10))
    env.call_function("__toplevel__")  ## some top-level context
    return self.ast.evaluate(env)
Example #3
java_import(gateway.jvm, "org.apache.spark.SparkConf")
java_import(gateway.jvm, "org.apache.spark.api.java.*")
java_import(gateway.jvm, "org.apache.spark.api.python.*")
java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
java_import(gateway.jvm, "org.apache.spark.sql.*")
java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
java_import(gateway.jvm, "scala.Tuple2")

## pull the JVM-side configuration, context, and job metadata from the Py4J entry point
jconf = entry_point.getSparkConf()
jsc = entry_point.getJavaSparkContext()

job_id = entry_point.getJobId()
javaEnv = entry_point.getEnv()
working_dir = javaEnv.workingDir() or '/tmp/amaterasu'
env = Environment(javaEnv.name(), javaEnv.master(), javaEnv.inputRootPath(),
                  javaEnv.outputRootPath(), working_dir,
                  javaEnv.configuration())

## wrap the JVM objects in their PySpark counterparts
conf = SparkConf(_jvm=gateway.jvm, _jconf=jconf)
sc = SparkContext(jsc=jsc, gateway=gateway, conf=conf)
spark = SparkSession(sc, entry_point.getSparkSession())

ama_context = AmaContext(sc, spark, job_id, env)

## main execution loop: fetch the next action from the queue, look up its
## result queue, and parse its Python source
while True:
    actionData = queue.getNext()  ## a Scala tuple exposed via Py4J
    resultQueue = entry_point.getResultQueue(actionData._2())
    actionSource = actionData._1()
    tree = ast.parse(actionSource)
    exports = actionData._3()
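    ## the rest of the loop body is not part of this excerpt; as a hedged
    ## sketch only, one straightforward way to run the action is to compile
    ## and execute the parsed source node by node in the driver's globals.
    ## Pushing results to resultQueue and applying `exports` are omitted
    ## because their APIs are not shown above.
    for node in tree.body:
        wrapper = ast.Module(body=[node], type_ignores=[])
        code_obj = compile(wrapper, '<ast>', 'exec')
        exec(code_obj, globals())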