def test1(obj):
    """Round-trip *obj* through source._wrap and source.getsource.

    obj - a callable of one float argument whose source is retrievable
    """
    # a wrapped callable must behave exactly like the original
    _obj = source._wrap(obj)
    assert _obj(1.57) == obj(1.57)
    # regenerate the source under a new alias and execute it.
    # The original 'exec src in globals(), locals()' statement is
    # Python-2-only syntax; moreover, on Python 3 names created by exec()
    # are not visible as function locals, so exec into an explicit
    # namespace and fetch the alias from it (equivalent on Python 2).
    src = source.getsource(obj, alias="_f")
    namespace = {}
    exec(src, globals(), namespace)
    _f = namespace["_f"]
    assert _f(1.57) == obj(1.57)
    name = source._get_name(obj)
    # NOTE(review): the right-hand 'or' operand is a non-empty string and
    # therefore always truthy, so this assert can never fail via that arm;
    # presumably 'name == src.split("=", 1)[0].strip()' was intended — confirm
    assert name == obj.__name__ or src.split("=", 1)[0].strip()
def __dumpsfunc(self, funcs, modules):
    """Serializes functions and modules"""
    # memoize on the hash of the combined (funcs + modules) tuple
    key = hash(funcs + modules)
    if key not in self.__sfuncHM:
        extracted = [self.__get_source(fn) for fn in funcs]
        # should probably just 'try' above, if fail rely on dill.dumps
        payload = (_get_name(funcs[0]), extracted, modules)
        self.__sfuncHM[key] = pickle.dumps(payload, self.__pickle_proto)
    return self.__sfuncHM[key]
def submit(self, func, args=(), depfuncs=(), modules=(), callback=None,
        callbackargs=(), group='default', globals=None):
    """Submits function to the execution queue

        func - function to be executed
        args - tuple with arguments of the 'func'
        depfuncs - tuple with functions which might be called from 'func'
        modules - tuple with module names to import
        callback - callback function which will be called with argument
                list equal to callbackargs+(result,)
                as soon as calculation is done
        callbackargs - additional arguments for callback function
        group - job group, is used when wait(group) is called to wait for
        jobs in a given group to finish
        globals - dictionary from which all modules, functions and classes
        will be imported, for instance: globals=globals()
    """
    # perform some checks for frequent mistakes
    if self.__exiting:
        raise RuntimeError("Cannot submit jobs: server"\
                " instance has been destroyed")

    if not isinstance(args, tuple):
        raise TypeError("args argument must be a tuple")

    if not isinstance(depfuncs, tuple):
        raise TypeError("depfuncs argument must be a tuple")

    if not isinstance(modules, tuple):
        raise TypeError("modules argument must be a tuple")

    if not isinstance(callbackargs, tuple):
        raise TypeError("callbackargs argument must be a tuple")

    # NOTE(review): types.StringType is Python-2-only; on Python 3 this
    # would need isinstance(module, str)
    for module in modules:
        if not isinstance(module, types.StringType):
            raise TypeError("modules argument must be a list of strings")

    tid = self.__gentid()

    # when a globals() dict is supplied, auto-discover the modules to
    # import and treat every function/class found there as a dependency
    if globals:
        modules += tuple(self.__find_modules("", globals))
        modules = tuple(set(modules))
        self.__logger.debug("Task %i will autoimport next modules: %s" %
                (tid, str(modules)))
        for object1 in globals.values():
            if isinstance(object1, types.FunctionType) \
                    or isinstance(object1, types.ClassType):
                depfuncs += (object1, )

    # register the task as waiting before it is queued, so wait(group)
    # can see it; the lock guards __waittasks against reader threads
    task = _Task(self, tid, callback, callbackargs, group)

    self.__waittasks_lock.acquire()
    self.__waittasks.append(task)
    self.__waittasks_lock.release()

    # if the function is a method of a class add self to the arguments list
    # (im_self is the Python-2 bound-method attribute; __self__ on Python 3)
    if isinstance(func, types.MethodType) and func.im_self is not None:
        args = (func.im_self, ) + args

    # if there is an instance of a user defined class in the arguments add
    # whole class to dependencies
    for arg in args:
        # Checks for both classic or new class instances
        # (string-prefix test catches new-style instances whose
        # repr(type) starts with "<class")
        if isinstance(arg, types.InstanceType) \
                or str(type(arg))[:6] == "<class":
            depfuncs += (arg.__class__, )

    # if there is a function in the arguments add this
    # function to dependencies
    for arg in args:
        if isinstance(arg, types.FunctionType):
            depfuncs += (arg, )

    # serialize the callable (plus dependencies) and its arguments
    # separately, then enqueue under the queue lock
    sfunc = self.__dumpsfunc((func, ) + depfuncs, modules)
    sargs = pickle.dumps(args, self.__pickle_proto)

    self.__queue_lock.acquire()
    self.__queue.append((task, sfunc, sargs))
    self.__queue_lock.release()

    self.__logger.debug("Task %i submited, function='%s'" %
            (tid, _get_name(func)))
    # kick the scheduler so an idle worker can pick the job up immediately
    self.__scheduler()
    return task