def submit(self, func, args=(), depfuncs=(), modules=(), callback=None,
           callbackargs=(), group='default', globals=None,
           functionToSkip=None):
    """Submits function to the execution queue and returns its task handle.

    func - function to be executed
    args - tuple with arguments of the 'func'
    depfuncs - tuple with functions which might be called from 'func'
    modules - tuple with module names to import
    callback - callback function which will be called with argument
            list equal to callbackargs+(result,)
            as soon as calculation is done
    callbackargs - additional arguments for callback function
    group - job group, is used when wait(group) is called to wait for
            jobs in a given group to finish
    globals - dictionary from which all modules, functions and classes
            will be imported, for instance: globals=globals()
    functionToSkip - list of functions/classes that need to be skipped
            from pickling (matched by their string representation)
    """
    # perform some checks for frequent mistakes
    if self._exiting:
        raise DestroyedServerError("Cannot submit jobs: server"
                                   " instance has been destroyed")
    if not isinstance(args, tuple):
        raise TypeError("args argument must be a tuple")
    if not isinstance(depfuncs, tuple):
        raise TypeError("depfuncs argument must be a tuple")
    if not isinstance(modules, tuple):
        raise TypeError("modules argument must be a tuple")
    if not isinstance(callbackargs, tuple):
        raise TypeError("callbackargs argument must be a tuple")
    if globals is not None and not isinstance(globals, dict):
        raise TypeError("globals argument must be a dictionary")
    for module in modules:
        if not isinstance(module, str):
            raise TypeError("modules argument must be a list of strings")

    tid = self.__gentid()

    # PY2 old-style classes are types.ClassType; PY3 has no such type, so
    # reuse FunctionType there (the second isinstance test is then a no-op)
    other_type = types.FunctionType if six.PY3 else types.ClassType

    if globals:
        # auto-discover importable modules and callables reachable from the
        # caller's global namespace so the workers can rebuild them
        modules += tuple(self.__find_modules("", globals))
        modules = tuple(set(modules))
        self.logger.debug("Task %i will autoimport next modules: %s" %
                          (tid, str(modules)))
        for object1 in globals.values():
            if isinstance(object1, types.FunctionType) \
                    or isinstance(object1, other_type):
                depfuncs += (object1, )

    task = _Task(self, tid, callback, callbackargs, group)

    self.__waittasks_lock.acquire()
    self.__waittasks.append(task)
    self.__waittasks_lock.release()

    # if the function is a method of a class add self to the arguments list
    if isinstance(func, types.MethodType):
        func_self = func.__self__ if six.PY3 else func.im_self
        if func_self is not None:
            args = (func_self, ) + args

    # if there is an instance of a user defined class in the arguments add
    # whole class hierarchy to dependencies
    for arg in args:
        # Checks for both classic or new class instances
        if (six.PY2 and isinstance(arg, types.InstanceType)) \
                or str(type(arg))[:6] == "<class":
            # in PY3, all instances are <class... so skip the builtins
            if getattr(inspect.getmodule(arg), '__name__', None) \
                    in ['builtins', '__builtin__', None]:
                pass
            # do not include source for imported modules
            elif ppc.is_not_imported(arg, modules):
                # RAVEN CHANGE: honor the functionToSkip exclusion list.
                clshier = ppc.get_class_hierarchy(arg.__class__)
                if functionToSkip is not None:
                    # compare by string representation in order to avoid
                    # baseclass identity issues
                    skipNames = set(str(elem) for elem in functionToSkip)
                    clshier = [cls for cls in clshier
                               if str(cls) not in skipNames]
                depfuncs += tuple(clshier)

    # if there is a function in the arguments add this
    # function to dependancies
    for arg in args:
        if isinstance(arg, types.FunctionType):
            depfuncs += (arg, )

    sfunc = self.__dumpsfunc((func, ) + depfuncs, modules)
    sargs = pickle.dumps(args, self.__pickle_proto)

    self.__queue_lock.acquire()
    self.__queue.append((task, sfunc, sargs))
    self.__queue_lock.release()

    self.logger.debug("Task %i submited, function='%s'" %
                      (tid, getname(func)))

    self.__scheduler()
    return task
def submit(self, func, args=(), depfuncs=(), modules=(), callback=None,
           callbackargs=(), group='default', globals=None):
    """Queues *func* for execution and returns the corresponding task.

    func - function to be executed
    args - tuple with arguments of the 'func'
    depfuncs - tuple with functions which might be called from 'func'
    modules - tuple with module names to import
    callback - callback function which will be called with argument
            list equal to callbackargs+(result,)
            as soon as calculation is done
    callbackargs - additional arguments for callback function
    group - job group, is used when wait(group) is called to wait for
            jobs in a given group to finish
    globals - dictionary from which all modules, functions and classes
            will be imported, for instance: globals=globals()
    """
    # guard against submitting to a server that is already shutting down
    if self._exiting:
        raise DestroyedServerError("Cannot submit jobs: server"
                                   " instance has been destroyed")

    # table-driven validation of the tuple-typed arguments; order matters
    # so that the first offending argument raises first
    tupleArgs = (
        (args, "args argument must be a tuple"),
        (depfuncs, "depfuncs argument must be a tuple"),
        (modules, "modules argument must be a tuple"),
        (callbackargs, "callbackargs argument must be a tuple"),
    )
    for value, message in tupleArgs:
        if not isinstance(value, tuple):
            raise TypeError(message)
    if globals is not None and not isinstance(globals, dict):
        raise TypeError("globals argument must be a dictionary")
    for moduleName in modules:
        if not isinstance(moduleName, types.StringType):
            raise TypeError("modules argument must be a list of strings")

    tid = self.__gentid()

    if globals:
        # harvest modules and callables from the caller's namespace so the
        # remote workers can reconstruct the execution environment
        modules = tuple(set(modules + tuple(self.__find_modules("", globals))))
        self.logger.debug("Task %i will autoimport next modules: %s" % (
                tid, str(modules)))
        for candidate in globals.values():
            if isinstance(candidate, (types.FunctionType, types.ClassType)):
                depfuncs += (candidate, )

    task = _Task(self, tid, callback, callbackargs, group)

    self.__waittasks_lock.acquire()
    self.__waittasks.append(task)
    self.__waittasks_lock.release()

    # bound methods need their instance prepended to the argument list
    if isinstance(func, types.MethodType) and func.im_self is not None:
        args = (func.im_self, ) + args

    # pull the class hierarchy of any user-defined instance among the
    # arguments into the dependencies (classic and new-style classes)
    for argument in args:
        isClassicInstance = isinstance(argument, types.InstanceType)
        looksLikeClass = str(type(argument))[:6] == "<class"
        # sources of imported modules are deliberately not shipped
        if (isClassicInstance or looksLikeClass) \
                and ppcommon.is_not_imported(argument, modules):
            depfuncs += tuple(ppcommon.get_class_hierarchy(argument.__class__))

    # plain functions passed as arguments must be shipped as well
    for argument in args:
        if isinstance(argument, types.FunctionType):
            depfuncs += (argument, )

    serializedFunc = self.__dumpsfunc((func, ) + depfuncs, modules)
    serializedArgs = pickle.dumps(args, self.__pickle_proto)

    self.__queue_lock.acquire()
    self.__queue.append((task, serializedFunc, serializedArgs))
    self.__queue_lock.release()

    self.logger.debug("Task %i submited, function='%s'" % (
            tid, func.func_name))

    self.__scheduler()
    return task
def submit(self, func, args=(), depfuncs=(), modules=(), callback=None,
           callbackargs=(), group='default', globals=None):
    """Submits function to the execution queue

        func - function to be executed
        args - tuple with arguments of the 'func'
        depfuncs - tuple with functions which might be called from 'func'
        modules - tuple with module names to import
        callback - callback function which will be called with argument
                list equal to callbackargs+(result,)
                as soon as calculation is done
        callbackargs - additional arguments for callback function
        group - job group, is used when wait(group) is called to wait for
        jobs in a given group to finish
        globals - dictionary from which all modules, functions and classes
        will be imported, for instance: globals=globals()
    """
    # NOTE(review): this variant uses Python-2-only attributes
    # (types.StringType, types.ClassType, types.InstanceType, im_self,
    # func_name) — it cannot run under Python 3; verify which copy of
    # submit() is actually in use.

    # perform some checks for frequent mistakes
    if self._exiting:
        raise DestroyedServerError("Cannot submit jobs: server"\
                " instance has been destroyed")

    if not isinstance(args, tuple):
        raise TypeError("args argument must be a tuple")

    if not isinstance(depfuncs, tuple):
        raise TypeError("depfuncs argument must be a tuple")

    if not isinstance(modules, tuple):
        raise TypeError("modules argument must be a tuple")

    if not isinstance(callbackargs, tuple):
        raise TypeError("callbackargs argument must be a tuple")

    if globals is not None and not isinstance(globals, dict):
        raise TypeError("globals argument must be a dictionary")

    for module in modules:
        if not isinstance(module, types.StringType):
            raise TypeError("modules argument must be a list of strings")

    # unique id for this task
    tid = self.__gentid()

    if globals:
        # auto-import modules and callables found in the caller's global
        # namespace so remote workers can rebuild them
        modules += tuple(self.__find_modules("", globals))
        modules = tuple(set(modules))
        self.logger.debug("Task %i will autoimport next modules: %s" % (
                tid, str(modules)))
        for object1 in globals.values():
            if isinstance(object1, types.FunctionType) \
                    or isinstance(object1, types.ClassType):
                depfuncs += (object1, )

    task = _Task(self, tid, callback, callbackargs, group)

    self.__waittasks_lock.acquire()
    self.__waittasks.append(task)
    self.__waittasks_lock.release()

    # if the function is a method of a class add self to the arguments list
    if isinstance(func, types.MethodType) and func.im_self is not None:
        args = (func.im_self, ) + args

    # if there is an instance of a user defined class in the arguments add
    # whole class to dependancies
    for arg in args:
        # Checks for both classic or new class instances
        if isinstance(arg, types.InstanceType) \
                or str(type(arg))[:6] == "<class":
            # do not include source for imported modules
            if ppcommon.is_not_imported(arg, modules):
                depfuncs += tuple(
                        ppcommon.get_class_hierarchy(arg.__class__))

    # if there is a function in the arguments add this
    # function to dependancies
    for arg in args:
        if isinstance(arg, types.FunctionType):
            depfuncs += (arg, )

    # serialize the function (with its dependencies) and the arguments
    sfunc = self.__dumpsfunc((func, ) + depfuncs, modules)
    sargs = pickle.dumps(args, self.__pickle_proto)

    self.__queue_lock.acquire()
    self.__queue.append((task, sfunc, sargs))
    self.__queue_lock.release()

    self.logger.debug("Task %i submited, function='%s'" % (
            tid, func.func_name))

    self.__scheduler()

    return task