Example #1
import os
import re
import shelve
import sys

import utils  # project-local helpers providing func_exec and readable_call


def _function_wrapper(args_package):
    """Helper to wrap a single function evaluation.

    TODO: use the multiprocessing logger in case of error, e.g.
    multiprocessing.get_logger().error("%r failed" % (args_package,))
    """
    (execute_key, funcname, args, kwargs) = args_package
    # run the call and hand both the job description and its result back in memory
    return (args_package, utils.func_exec(funcname, args, kwargs))
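
A minimal usage sketch for this in-memory variant, not part of the original code: the
batch below and the task reference "my_task" are hypothetical stand-ins for whatever
utils.func_exec accepts.

if __name__ == "__main__":
    import multiprocessing

    # hypothetical batch: (execute_key, funcname, args, kwargs) per call
    packages = [("job%d" % i, "my_task", (i,), {}) for i in range(4)]
    with multiprocessing.Pool() as pool:
        for package, result in pool.map(_function_wrapper, packages):
            print(package[0], "->", result)
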
def _function_wrapper(args_package):
    """Allow a multiprocessing Pool to call generic functions/args/kwargs.

    Data are saved here rather than handed back to avoid the scenario where
    all of the output from a batch run is held in memory.
    """
    (identifier, directory, funcname, args, kwargs) = args_package

    readable = utils.readable_call(funcname, args, kwargs)
    filename = "%s/%s.shelve" % (directory, identifier)
    # collapse any duplicated path separators
    filename = re.sub('/+', '/', filename)
    print("%s -> %s" % (readable, filename))

    result = utils.func_exec(funcname, args, kwargs, printcall=False)

    # write the full call description and its result to a fresh shelve file
    outfile = shelve.open(filename, 'n', protocol=-1)
    outfile["identifier"] = identifier
    outfile["filename"] = filename
    outfile["funcname"] = funcname
    outfile["args"] = args
    outfile["kwargs"] = kwargs
    outfile["result"] = result
    outfile.close()

    return identifier
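
A usage sketch for the shelve-based wrapper, again not part of the original module: the
output directory "batch_output" and the task reference "my_task" are assumed names, with
utils.func_exec expected to resolve the task as in the wrapper above.

if __name__ == "__main__":
    import multiprocessing

    outdir = "batch_output"
    os.makedirs(outdir, exist_ok=True)
    # one (identifier, directory, funcname, args, kwargs) package per call
    packages = [("run%02d" % i, outdir, "my_task", (i,), {}) for i in range(4)]
    with multiprocessing.Pool() as pool:
        finished = pool.map(_function_wrapper, packages)
    # workers hand back only identifiers; the results themselves live on disk
    for identifier in finished:
        shelf = shelve.open("%s/%s.shelve" % (outdir, identifier))
        print(identifier, shelf["result"])
        shelf.close()
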
def process_job(job_filename):
    """Run a single queued job described by a shelve file.

    The job file is renamed to .run while it executes, its return value is
    written back into the shelve, captured stdout is saved to a .log file,
    and the file is finally renamed to .done.
    """
    basename = os.path.splitext(job_filename)[0]

    log_filename = "%s.log" % basename
    run_filename = "%s.run" % basename
    done_filename = "%s.done" % basename

    # mark the job as running and read the call description
    os.rename(job_filename, run_filename)
    jobspec = shelve.open(run_filename, protocol=-1)
    print(jobspec)
    funcname = jobspec['funcname']
    args = jobspec['args']
    kwargs = jobspec['kwargs']

    retval = utils.func_exec(funcname, args, kwargs)
    jobspec['retval'] = retval
    jobspec.close()

    # assumes the caller has replaced sys.stdout with a StringIO-like buffer,
    # so the output captured during the run can be written to the log file
    outlog = open(log_filename, "w")
    outlog.write(sys.stdout.getvalue())
    outlog.close()

    os.rename(run_filename, done_filename)
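
A sketch of how a job file might be queued and consumed; the path "jobs/demo.job" and
task reference "my_task" are hypothetical, and it assumes the dbm backend stores the
shelve in a single file at exactly that path, as process_job itself does.

if __name__ == "__main__":
    import io

    os.makedirs("jobs", exist_ok=True)

    # queue a job: a shelve holding the call description
    job = shelve.open("jobs/demo.job", 'n', protocol=-1)
    job['funcname'] = "my_task"
    job['args'] = (3,)
    job['kwargs'] = {}
    job.close()

    # process_job reads sys.stdout.getvalue(), so capture output in a buffer
    real_stdout, sys.stdout = sys.stdout, io.StringIO()
    try:
        process_job("jobs/demo.job")
    finally:
        sys.stdout = real_stdout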