Example #1
from src.utils.utils import to_str, uses_database
from src.utils import logger
from src.server.models import ServerConfig

__all__ = ("to_pickle", "from_pickle", "do_pickle", "do_unpickle")

PICKLE_PROTOCOL = 2

# initialization and helpers

_GA = object.__getattribute__
_SA = object.__setattr__
_FROM_MODEL_MAP = None
_TO_MODEL_MAP = None
_TO_TYPECLASS = lambda o: hasattr(o, 'typeclass') and o.typeclass or o
_IS_PACKED_DBOBJ = lambda o: type(o) == tuple and len(o) == 4 and o[0] == '__packed_dbobj__'
if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6.4':
    # mysql <5.6.4 doesn't support millisecond precision
    _DATESTRING = "%Y:%m:%d-%H:%M:%S:000000"
else:
    _DATESTRING = "%Y:%m:%d-%H:%M:%S:%f"


def _TO_DATESTRING(obj):
    """
    this will only be called with valid database objects. Returns datestring
    on correct form.
    """
    try:
        return _GA(obj, "db_date_created").strftime(_DATESTRING)
    except AttributeError:
        # this can happen if the object is not yet saved - no datestring is then set
        return None  # assumed fallback when no creation date is available
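
A self-contained sketch of the tuple shape the _IS_PACKED_DBOBJ check above accepts; only the tag, the tuple length and the tag position come from the snippet, while the meaning of the remaining fields (natural key, creation datestring, database id) is assumed here purely for illustration.

# Illustration only: apart from the '__packed_dbobj__' tag, length and tag
# position, the field meanings below are assumptions, not taken from the snippet.
_IS_PACKED_DBOBJ = lambda o: type(o) == tuple and len(o) == 4 and o[0] == '__packed_dbobj__'

packed = ("__packed_dbobj__", ("objects", "objectdb"), "2013:01:01-12:00:00:000000", 42)
print(_IS_PACKED_DBOBJ(packed))             # True
print(_IS_PACKED_DBOBJ(("something", 1)))   # False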
Example #2
from src.utils.utils import uses_database
from src.utils import logger
from src.server.models import ServerConfig

__all__ = ("to_pickle", "from_pickle", "do_pickle", "do_unpickle")

PICKLE_PROTOCOL = 2

# initialization and helpers

_GA = object.__getattribute__
_SA = object.__setattr__
_FROM_MODEL_MAP = None
_TO_MODEL_MAP = None
_TO_TYPECLASS = lambda o: hasattr(o, 'typeclass') and o.typeclass or o
_IS_PACKED_DBOBJ = lambda o: type(o) == tuple and len(o) == 4 and o[0] == '__packed_dbobj__'
if uses_database("mysql") and ServerConfig.objects.get_mysql_db_version() < '5.6.4':
    # mysql <5.6.4 doesn't support millisecond precision
    _DATESTRING = "%Y:%m:%d-%H:%M:%S:000000"
else:
    _DATESTRING = "%Y:%m:%d-%H:%M:%S:%f"


def _TO_DATESTRING(obj):
    """
    this will only be called with valid database objects. Returns datestring
    on correct form.
    """
    try:
        return _GA(obj, "db_date_created").strftime(_DATESTRING)
    except AttributeError:
        # this can happen if the object is not yet saved - no datestring is then set
        return None  # assumed fallback when no creation date is available
Example #3
            # Set interactive state directly
            caller.cmdset.add(BatchInteractiveCmdSet)

            caller.msg("\nBatch-command processor - Interactive mode for %s ..." % python_path)
            show_curr(caller)
        else:
            caller.msg("Running Batch-command processor - Automatic mode for %s (this might take some time) ..." % python_path)

            procpool = False
            if "PythonProcPool" in utils.server_services():
                if utils.uses_database("sqlite3"):
                    caller.msg("Batchprocessor disabled ProcPool under SQLite3.")
                else:
                    procpool = True

            if procpool:
                # run in parallel process
                def callback(r):
                    caller.msg("  {GBatchfile '%s' applied." % python_path)
                    purge_processor(caller)

                def errback(e):
                    caller.msg("  {RError from processor: '%s'" % e)
                    purge_processor(caller)
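
The callback/errback pair above is handed to Evennia's async runner (the utils.run_async call appears in Example #4 below); since that call is not part of this snippet, the stand-in below only sketches the general callback/errback pattern with plain threading and is not Evennia's utils.run_async.

# Plain-threading stand-in illustrating the callback/errback pattern set up above.
# This is NOT Evennia's utils.run_async; it only shows how a success handler and
# an error handler are driven from a background task.
import threading

def run_async_sketch(func, callback, errback):
    def worker():
        try:
            result = func()
        except Exception as err:
            errback(err)
        else:
            callback(result)
    threading.Thread(target=worker).start()

run_async_sketch(lambda: "applied",
                 callback=lambda r: print("callback got:", r),
                 errback=lambda e: print("errback got:", e))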
Example #4
        caller.cmdset.add(BatchSafeCmdSet)

        if 'inter' in switches or 'interactive' in switches:
            # Allow more control over how batch file is executed

            # Set interactive state directly
            caller.cmdset.add(BatchInteractiveCmdSet)

            caller.msg("\nBatch-command processor - Interactive mode for %s ..." % python_path)
            show_curr(caller)
        else:
            caller.msg("Running Batch-command processor - Automatic mode for %s (this might take some time) ..." % python_path)

            procpool = False
            if "PythonProcPool" in utils.server_services():
                if utils.uses_database("sqlite3"):
                    caller.msg("Batchprocessor disabled ProcPool under SQLite3.")
                else:
                    procpool = True

            if procpool:
                # run in parallel process
                def callback(r):
                    caller.msg("  {GBatchfile '%s' applied." % python_path)
                    purge_processor(caller)

                def errback(e):
                    caller.msg("  {RError from processor: '%s'" % e)
                    purge_processor(caller)

                utils.run_async(_PROCPOOL_BATCHCMD_SOURCE,
Example #5

from collections import defaultdict
from django.contrib.contenttypes.models import ContentType
from src.utils.utils import uses_database

__all__ = ("to_pickle", "from_pickle", "do_pickle", "do_unpickle")

PICKLE_PROTOCOL = 2

# initialization and helpers

_GA = object.__getattribute__
_SA = object.__setattr__
_FROM_MODEL_MAP = None
_TO_MODEL_MAP = None
_TO_TYPECLASS = lambda o: hasattr(o, 'typeclass') and o.typeclass or o
_IS_PACKED_DBOBJ = lambda o: type(o) == tuple and len(o) == 4 and o[0] == '__packed_dbobj__'
_TO_DATESTRING = lambda o: _GA(o, "db_date_created").strftime("%Y:%m:%d-%H:%M:%S:%f")
if uses_database("mysql"):
    from src.server.models import ServerConfig
    mysql_version = ServerConfig.objects.get_mysql_db_version()
    if mysql_version < '5.6.4':
        # mysql <5.6.4 doesn't support millisecond precision
        _TO_DATESTRING = lambda o: _GA(o, "db_date_created").strftime("%Y:%m:%d-%H:%M:%S:000000")

def _init_globals():
    "Lazy importing to avoid circular import issues"
    global _FROM_MODEL_MAP, _TO_MODEL_MAP
    if not _FROM_MODEL_MAP:
        _FROM_MODEL_MAP = defaultdict(str)
        _FROM_MODEL_MAP.update(dict((c.model, c.natural_key()) for c in ContentType.objects.all()))
    if not _TO_MODEL_MAP:
        _TO_MODEL_MAP = defaultdict(str)
        _TO_MODEL_MAP.update(dict((c.natural_key(), c.model_class()) for c in ContentType.objects.all()))
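
A minimal usage sketch of the two lookup maps built by _init_globals, assuming a configured Django project; the model name "objectdb" below is hypothetical and used only for illustration.

# Usage sketch (assumes Django is configured). "objectdb" is a hypothetical model name.
_init_globals()
natural_key = _FROM_MODEL_MAP["objectdb"]   # e.g. ("app_label", "objectdb"), or "" if unknown
model_class = _TO_MODEL_MAP[natural_key]    # the model class registered under that natural key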