def get_returns(self):
    "Collapse collected returns: one value, the full list, or None."
    lr = len(self.returns)
    # pre-2.5 and/or idiom: a single return gives the bare value,
    # several give the list, none gives None
    val = lr and (lr == 1 and self.returns[0] or self.returns) or None
    if val not in (None, [], ()):
        return do_pickle(to_pickle(val))
    else:
        return ""
Example #3
    def executecode(self, source, environment):
        """
        Remote code execution

        source - Python code snippet
        environment - pickled dictionary of environment
                      variables. They are stored in
                      two keys "normal" and "objs" where
                      normal holds a dictionary of
                      normally pickled python objects
                      whereas objs points to a dictionary
                      of database representations ((app,key),id).

        The environment's entries will be made available as
        local variables during the execution. Normal eval
        results will be returned as-is. For more complex
        code snippets (run by exec), the _return function
        is available: All data sent to _return(retval) will
        be returned from this system whenever the system
        finishes. Multiple calls to _return will result in
        a list being returned. The return value is pickled
        and thus allows for returning any pickleable data.

        """

        class Ret(object):
            "Helper class for holding returns from exec"

            def __init__(self):
                self.returns = []

            def __call__(self, *args, **kwargs):
                self.returns.extend(list(args))

            def get_returns(self):
                lr = len(self.returns)
                val = lr and (lr == 1 and self.returns[0] or self.returns) or None
                if val not in (None, [], ()):
                    return do_pickle(to_pickle(val))
                else:
                    return ""

        _return = Ret()

        available_vars = {"_return": _return}
        if environment:
            # load environment
            try:
                environment = from_pickle(do_unpickle(environment))
                available_vars.update(environment)
            except Exception:
                logger.log_trace()
        # try to execute with eval first
        try:
            ret = eval(source, {}, available_vars)
            if ret not in (None, [], ()):
                ret = _return.get_returns() or do_pickle(to_pickle(ret))
            else:
                ret = ""
        except Exception:
            # use exec instead
            exec source in available_vars
            ret = _return.get_returns()
        # get the list of affected objects to recache
        objs = PROC_MODIFIED_OBJS.values()
        # we need to include the locations too, to update their content caches
        objs = objs + list(set([o.location for o in objs if hasattr(o, "location") and o.location]))
        # print "objs:", objs
        # print "to_pickle", to_pickle(objs, emptypickle=False, do_pickle=False)
        if objs not in (None, [], ()):
            to_recache = do_pickle(to_pickle(objs))
        else:
            to_recache = ""
        # empty the list without losing the memory reference
        # PROC_MODIFIED_OBJS[:] = []
        PROC_MODIFIED_OBJS.clear()  # TODO - is this not messing anything up?
        return {"response": ret, "recached": to_recache}
Example #4
def run_async(to_execute, *args, **kwargs):
    """
    Runs a function or executes a code snippet asynchronously.

    Inputs:
    to_execute (callable) - if this is a callable, it will
            be executed with *args and non-reserved **kwargs as
            arguments.
            The callable will be executed using ProcPool, or in
            a thread if ProcPool is not available.
    to_execute (string) - this is only available if ProcPool is
            running. If a string, to_execute will be treated as a code
            snippet to execute asynchronously. *args are then not used
            and non-reserved **kwargs are used to define the execution
            environment made available to the code.

    reserved kwargs:
        'use_thread' (bool) - this only works with callables (not code).
                     It forces the code to run in a thread instead
                     of using the Process Pool, even if the latter
                     is available. This could be useful if you want
                     to make sure to not get out of sync with the
                     main process (such as accessing in-memory global
                     properties)
        'proc_timeout' (int) - only used if ProcPool is available. Sets a
                     max time for execution. This alters the value set
                     by settings.PROCPOOL_TIMEOUT
        'at_return' - should point to a callable with one argument.
                    It will be called with the return value from
                    to_execute.
        'at_return_kwargs' - this dictionary will be used as keyword
                             arguments to the at_return callback.
        'at_err' - this will be called with a Failure instance if
                       there is an error in to_execute.
        'at_err_kwargs' - this dictionary will be used as keyword
                          arguments to the at_err errback.
        'procpool_name' - the Service name of the procpool to use.
                          Default is PythonProcPool.

    *args   - if to_execute is a callable, these args will be used
              as arguments for that function. If to_execute is a string
              *args are not used.
    **kwargs - if to_execute is a callable, these kwargs will be used
              as keyword arguments in that function. If a string, they
              instead are used to define the execution environment
              that should be available to execute the code in to_execute.

    run_async will either relay the code to a thread or to a process pool
    depending on input and what is available in the system. To activate
    process pooling, settings.PROCPOOL_ENABLE must be set.

    to_execute in string form should handle all imports needed. kwargs
    can be used to send objects and properties. Such properties will
    be pickled, except Database Objects, which will be sent across
    in a special format and re-loaded on the other side.

    To get a return value from your code snippet, use the _return()
    function: Every call to this function from your snippet will
    append the argument to an internal list of returns. This return value
    (or a list) will be the first argument to the at_return callback.

    Use this function with restraint and only for features/commands
    that you know have no influence on the cause-and-effect order of your
    game (commands given after the async function might be executed before
    it has finished). Accessing the same property from different
    threads/processes can lead to unpredictable behaviour if you are not
    careful (this is called a "race condition").

    Also note that some databases, notably sqlite3, don't support access from
    multiple threads simultaneously, so if you do heavy database access from
    your to_execute under sqlite3 you will probably run very slow or even get
    tracebacks.

    """
    # handle all global imports.
    global _PPOOL, _SESSIONS

    # get the procpool name, if set in kwargs
    procpool_name = kwargs.get("procpool_name", "PythonProcPool")

    if _PPOOL is None:
        # Try to load process Pool
        from src.server.sessionhandler import SESSIONS as _SESSIONS

        try:
            _PPOOL = _SESSIONS.server.services.namedServices.get(procpool_name).pool
        except AttributeError:
            _PPOOL = False

    # _PPOOL can be False here if no pool service was found; guard the default
    use_timeout = kwargs.pop("proc_timeout", _PPOOL.timeout if _PPOOL else None)

    # helper converters for callbacks/errbacks
    def convert_return(f):
        def func(ret, *args, **kwargs):
            rval = ret["response"] and from_pickle(do_unpickle(ret["response"]))
            reca = ret["recached"] and from_pickle(do_unpickle(ret["recached"]))
            # recache all indicated objects
            for obj in reca:
                clean_object_caches(obj)
            if f:
                return f(rval, *args, **kwargs)
            else:
                return rval

        return func

    def convert_err(f):
        def func(err, *args, **kwargs):
            err.trap(Exception)
            err = err.getErrorMessage()
            if use_timeout and err == _PROC_ERR:
                err = "Process took longer than %ss and timed out." % use_timeout
            if f:
                return f(err, *args, **kwargs)
            else:
                err = "Error reported from subprocess: '%s'" % err
                logger.log_errmsg(err)

        return func

    # handle special reserved input kwargs
    use_thread = kwargs.pop("use_thread", False)
    callback = convert_return(kwargs.pop("at_return", None))
    errback = convert_err(kwargs.pop("at_err", None))
    callback_kwargs = kwargs.pop("at_return_kwargs", {})
    errback_kwargs = kwargs.pop("at_err_kwargs", {})

    if _PPOOL and not use_thread:
        # process pool is running
        if isinstance(to_execute, basestring):
            # run source code in process pool
            cmdargs = {"_timeout": use_timeout}
            cmdargs["source"] = to_str(to_execute)
            if kwargs:
                cmdargs["environment"] = do_pickle(to_pickle(kwargs))
            else:
                cmdargs["environment"] = ""
            # defer to process pool
            deferred = _PPOOL.doWork(ExecuteCode, **cmdargs)
        elif callable(to_execute):
            # execute callable in process
            callname = to_execute.__name__
            cmdargs = {"_timeout": use_timeout}
            cmdargs["source"] = "_return(%s(*args,**kwargs))" % callname
            cmdargs["environment"] = do_pickle(to_pickle({callname: to_execute, "args": args, "kwargs": kwargs}))
            deferred = _PPOOL.doWork(ExecuteCode, **cmdargs)
        else:
            raise RuntimeError("'%s' could not be handled by the process pool" % to_execute)
    elif callable(to_execute):
        # no process pool available, fall back to old deferToThread mechanism.
        deferred = threads.deferToThread(to_execute, *args, **kwargs)
    else:
        # no appropriate input for this server setup
        raise RuntimeError("'%s' could not be handled by run_async - no valid input or no process pool." % to_execute)

    # attach callbacks
    if callback:
        deferred.addCallback(callback, **callback_kwargs)
    deferred.addErrback(errback, **errback_kwargs)
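
A hedged usage sketch of the API described in the docstring above;
count_players and report are made-up examples, not part of the module:

def count_players():
    # runs in the process pool, or in a thread if no pool is available
    from src.server.sessionhandler import SESSIONS
    return len(SESSIONS)

def report(result):
    # at_return receives the (unpickled) return value as its only argument
    print("async result: %s" % result)

# callable form - *args/**kwargs are passed on to the callable itself
run_async(count_players, at_return=report)

# string form - kwargs define the snippet's environment; the snippet hands
# its result back through _return(), and proc_timeout caps the runtime
run_async("_return(x + y)", x=40, y=2, at_return=report, proc_timeout=5)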
Example #5
def wrapper(self, *args, **kwargs):
    "wrap all queries searching the db_value field in some way"
    self.__doc__ = method.__doc__
    # pickle any db_value* keyword before it reaches the actual query
    for key in (key for key in kwargs if key.startswith('db_value')):
        kwargs[key] = to_pickle(kwargs[key])
    return method(self, *args, **kwargs)
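
The wrapper above is the inner function of a decorator closing over
method. A sketch of how such a decorator might be assembled and applied
(the factory name and the manager class are assumptions; to_pickle is the
serializer used throughout these examples):

def pickle_db_value_kwargs(method):
    # hypothetical decorator factory producing the wrapper above
    def wrapper(self, *args, **kwargs):
        self.__doc__ = method.__doc__
        for key in (key for key in kwargs if key.startswith('db_value')):
            kwargs[key] = to_pickle(kwargs[key])
        return method(self, *args, **kwargs)
    return wrapper

class AttributeManager(object):
    # hypothetical manager; a real one would filter on the db_value field
    @pickle_db_value_kwargs
    def get_by_value(self, **kwargs):
        "e.g. get_by_value(db_value=some_object)"
        return kwargs  # stand-in for an actual queryset filter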
Example #6
    def executecode(self, source, environment):
        """
        Remote code execution

        source - Python code snippet
        environment - pickled dictionary of environment
                      variables. They are stored in
                      two keys "normal" and "objs" where
                      normal holds a dictionary of
                      normally pickled python objects
                      whereas objs points to a dictionary
                      of database representations ((app,key),id).

        The environment's entries will be made available as
        local variables during the execution. Normal eval
        results will be returned as-is. For more complex
        code snippets (run by exec), the _return function
        is available: All data sent to _return(retval) will
        be returned from this system whenever the system
        finishes. Multiple calls to _return will result in
        a list being returned. The return value is pickled
        and thus allows for returning any pickleable data.

        """
        class Ret(object):
            "Helper class for holding returns from exec"

            def __init__(self):
                self.returns = []

            def __call__(self, *args, **kwargs):
                self.returns.extend(list(args))

            def get_returns(self):
                lr = len(self.returns)
                val = lr and (lr == 1 and self.returns[0]
                              or self.returns) or None
                if val not in (None, [], ()):
                    return do_pickle(to_pickle(val))
                else:
                    return ""

        _return = Ret()

        available_vars = {'_return': _return}
        if environment:
            # load environment
            try:
                environment = from_pickle(do_unpickle(environment))
                available_vars.update(environment)
            except Exception:
                logger.log_trace()
        # try to execute with eval first
        try:
            ret = eval(source, {}, available_vars)
            if ret not in (None, [], ()):
                ret = _return.get_returns() or do_pickle(to_pickle(ret))
            else:
                ret = ""
        except Exception:
            # use exec instead
            exec source in available_vars
            ret = _return.get_returns()
        # get the list of affected objects to recache
        objs = list(set(PROC_MODIFIED_OBJS))
        # we need to include the locations too, to update their content caches
        objs = objs + list(
            set([
                o.location
                for o in objs if hasattr(o, "location") and o.location
            ]))
        #print "objs:", objs
        #print "to_pickle", to_pickle(objs, emptypickle=False, do_pickle=False)
        if objs not in (None, [], ()):
            to_recache = do_pickle(to_pickle(objs))
        else:
            to_recache = ""
        # empty the list without losing the memory reference
        PROC_MODIFIED_OBJS[:] = []
        return {'response': ret, 'recached': to_recache}
Example #8
    def forwards(self, orm):
        "Write your forwards methods here."

        # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."

        # modified for migration - converts to plain python properties
        def from_attr(datatuple):
            """
            Retrieve data from a previously stored attribute. This
            is always a dict with keys type and data.

            datatuple comes from the database storage and has
            the following format:
               (simple|dbobj|iter, <data>)
            where
                simple - a single non-db object, like a string. It is returned as-is.
                dbobj - a single dbobj-id. This id is retrieved back from the database.
                iter - an iterable. This is traversed iteratively, converting all found
                       dbobj-ids back to objects. Also, all lists and dictionaries are
                       returned as their PackedList/PackedDict counterparts in order to
                       allow in-place assignment such as obj.db.mylist[3] = val. Mylist
                       is then a PackedList that saves the data on the fly.
            """

            # nested functions
            def id2db(data):
                """
                Convert db-stored dbref back to object
                """
                mclass = CTYPEGET(model=data.db_model).model_class()
                try:
                    return mclass.objects.get(id=data.id)
                except mclass.DoesNotExist:
                    # could happen if object was deleted in the interim
                    return None

            def iter_id2db(item):
                """
                Recursively looping through stored iterables, replacing ids with actual objects.
                We return PackedDict and PackedLists instead of normal lists; this is needed in order for
                the user to do dynamic saving of nested data in-place, such as obj.db.attrlist[2]=3. What is
                stored in the database is, however, always normal python primitives.
                """
                dtype = type(item)
                if dtype in (
                        basestring, int, float, long,
                        bool):  # check the most common types first, for speed
                    return item
                elif dtype == PackedDBobject or hasattr(
                        item, '__class__'
                ) and item.__class__.__name__ == "PackedDBobject":
                    return id2db(item)
                elif dtype == tuple:
                    return tuple([iter_id2db(val) for val in item])
                elif dtype in (dict, PackedDict):
                    return dict((key, iter_id2db(val))
                                for key, val in item.items())
                elif hasattr(item, '__iter__'):
                    return list(iter_id2db(val) for val in item)
                else:
                    return item

            typ, data = datatuple

            if typ == 'simple':
                # single non-db objects
                return data
            elif typ == 'dbobj':
                # a single stored dbobj
                return id2db(data)
            elif typ == 'iter':
                # all types of iterables
                return iter_id2db(data)

        if not db.dry_run:
            for attr in orm['players.PlayerAttribute'].objects.all():
                # repack attr into new format and reimport
                datatuple = loads(to_str(attr.db_value))
                python_data = from_attr(datatuple)
                new_data = to_pickle(python_data)
                attr.db_value2 = new_data  # new pickleObjectField
                attr.save()

    def backwards(self, orm):
        "Write your backwards methods here."
        raise RuntimeError("This migration cannot be reversed.")
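
For reference, a small runnable sketch of the datatuple formats that
from_attr dispatches on (_PackedRef and all sample values are invented
stand-ins for PackedDBobject data, not real database content):

class _PackedRef(object):
    # carries the two lookup keys id2db() needs
    def __init__(self, db_model, id):
        self.db_model, self.id = db_model, id

ref = _PackedRef("objectdb", 42)

datatuples = [
    ("simple", "a plain stored string"),           # returned as-is
    ("dbobj", ref),                                # resolved via id2db()
    ("iter", {"owner": ref, "scores": [1, 2]}),    # walked by iter_id2db()
]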
Example #9
    def forwards(self, orm):
        "Write your forwards methods here."
        # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."

        # modified for migration - converts to plain python properties
        def from_attr(datatuple):
            """
            Retrieve data from a previously stored attribute. This
            is always a dict with keys type and data.

            datatuple comes from the database storage and has
            the following format:
               (simple|dbobj|iter, <data>)
            where
                simple - a single non-db object, like a string. It is returned as-is.
                dbobj - a single dbobj-id. This id is retrieved back from the database.
                iter - an iterable. This is traversed iteratively, converting all found
                       dbobj-ids back to objects. Also, all lists and dictionaries are
                       returned as their PackedList/PackedDict counterparts in order to
                       allow in-place assignment such as obj.db.mylist[3] = val. Mylist
                       is then a PackedList that saves the data on the fly.
            """
            # nested functions
            def id2db(data):
                """
                Convert db-stored dbref back to object
                """
                mclass = CTYPEGET(model=data.db_model).model_class()
                try:
                    return mclass.objects.get(id=data.id)
                except mclass.DoesNotExist:
                    # could happen if object was deleted in the interim
                    return None

            def iter_id2db(item):
                """
                Recursively looping through stored iterables, replacing ids with actual objects.
                We return PackedDict and PackedLists instead of normal lists; this is needed in order for
                the user to do dynamic saving of nested data in-place, such as obj.db.attrlist[2]=3. What is
                stored in the database is, however, always normal python primitives.
                """
                dtype = type(item)
                if dtype in (basestring, int, float, long, bool): # check the most common types first, for speed
                    return item
                elif dtype == PackedDBobject or hasattr(item, '__class__') and item.__class__.__name__ == "PackedDBobject":
                    return id2db(item)
                elif dtype == tuple:
                    return tuple([iter_id2db(val) for val in item])
                elif dtype in (dict, PackedDict):
                    return dict((key, iter_id2db(val))
                                for key, val in item.items())
                elif hasattr(item, '__iter__'):
                    return list(iter_id2db(val) for val in item)
                else:
                    return item

            typ, data = datatuple

            if typ == 'simple':
                # single non-db objects
                return data
            elif typ == 'dbobj':
                # a single stored dbobj
                return id2db(data)
            elif typ == 'iter':
                # all types of iterables
                return iter_id2db(data)

        if not db.dry_run:
            for attr in orm['scripts.ScriptAttribute'].objects.all():
                # repack attr into new format and reimport
                datatuple = loads(to_str(attr.db_value))
                python_data = from_attr(datatuple)
                new_data = to_pickle(python_data)
                attr.db_value2 = new_data # new pickleObjectField
                attr.save()