def double_py2scala(self,arr): import asp.jit.asp_module as asp_module import avroInter.avro_backend as avro_backend import asp.codegen.ast_tools as ast_tools import asp.codegen.codegenScala as codegenScala import ast mod = asp_module.ASPModule(use_scala=True) f = open('func.py') rendered_py = f.read() func_ast_py = ast.parse(rendered_py) func_ast_scala = ast_tools.ConvertPyAST_ScalaAST().visit(func_ast_py) rendered_scala = codegenScala.to_source(func_ast_scala) # the first arg below specifies the main function in the set of input functions, # the one to be called first by main with the input args rendered = avro_backend.generate_scala_object("double","",rendered_scala) #NOTE: must name function differently here than the mainfunc above # because classpaths get goofed up if they're named the same as in the above line # Also, in the final line, be sure to call it by the name added below mod.add_function("double_outer", rendered, backend="scala") print 'RENDERED PY IS:', rendered_py print '-------------------------------------------------------------' print 'RENDERED SCALA IS:', rendered_scala print '-------------------------------------------------------------' print 'FULLY RENDERED SCALA WITH MAIN IS:', rendered print '-------------------------------------------------------------' return mod.double_outer(arr)
def double_using_scala(self, arr):
    """Run the pre-written Scala function in func1.scala on arr via ASP.

    Unlike double_py2scala, no Python->Scala translation happens here: the
    Scala source is loaded directly from 'func1.scala' and wrapped into a
    runnable Scala object by the avro backend.

    Args:
        arr: first argument forwarded to the generated Scala function.
    Returns:
        Whatever the JIT-compiled Scala function 'double' returns.
    """
    import asp.jit.asp_module as asp_module
    import avroInter.avro_backend as avro_backend

    mod = asp_module.ASPModule(use_scala=True)
    rendered = avro_backend.generate_scala_object("double", "func1.scala")
    # (A disabled alternative that read raw Scala from 'func.scala' was
    # removed here — it was dead code held in a no-op string literal.)

    # mainfunc name needs to be the same as the name for the added
    # function below
    mod.add_function("double", rendered, backend="scala")

    # NOTE(review): the extra args 2 and "asdfasdf" are forwarded verbatim;
    # their meaning depends on the Scala side — confirm against func1.scala.
    return mod.double(arr, 2, "asdfasdf")
def run_cloud(self, data):
    """Compile the estimate/reduce/average ASTs to Scala and run them.

    Converts the three Python ASTs stored on self to Scala, renders them
    with type declarations, wraps the result in a runnable Scala object,
    and executes it with the configured bootstrap parameters.

    Args:
        data: two-element sequence of (email_filename, model_filename).
    Returns:
        Result of the JIT-compiled Scala entry point 'run_outer'.
    """
    mod = asp_module.ASPModule(
        cache_dir="/root/spark/examples/target/scala-2.9.1.final/classes/",
        use_scala=True)

    # Translate each stored Python AST to a Scala AST (fresh converter per
    # tree, as the converter may carry per-visit state).
    scala_trees = [ast_tools.ConvertPyAST_ScalaAST().visit(tree)
                   for tree in (self.estimate_ast,
                                self.reduce_ast,
                                self.average_ast)]

    type_decs = (['compute_estimate', ['BootstrapData'], 'double'],
                 ['reduce_bootstraps', [('list', 'double')], 'double'],
                 ['average', [('array', 'double')], 'double'])
    gen = SourceGenerator(type_decs)

    # Render the three functions and stitch them into one Scala source blob.
    rendered_scala = '\n'.join(gen.to_source(t) for t in scala_trees)
    rendered_scala = combine(rendered_scala)
    rendered = avro_backend.generate_scala_object("run", "", rendered_scala)

    # NOTE: must append outer to function name above to get the classname
    # because of how scala_object created by avro_backend
    mod.add_function("run_outer", rendered, backend="scala")

    email_filename, model_filename = data[0], data[1]
    return mod.run_outer(email_filename, model_filename, self.dim,
                         self.num_subsamples, self.num_bootstraps,
                         self.subsample_len_exp)