Example #1
0
File: dstream.py  Project: szkb/dpark
from dpark.file_manager import open_file
from dpark.file_manager.utils import Error

# Optional streaming dependencies (scribe/thrift/kazoo). They are only needed
# for the scribe-based DStream input; when any of them is missing we disable
# that feature instead of failing at import time.
try:
    from scribe.scribe import Iface, ResultCode, Processor
    from thrift.protocol import TBinaryProtocol
    from thrift.transport import TSocket
    from thrift.server import TNonblockingServer
    from collections import deque
    from kazoo.client import KazooClient

    WITH_SCRIBE = True
except ImportError:
    # Was a bare `except:`, which also swallows SystemExit/KeyboardInterrupt
    # and hides unrelated errors raised while importing these modules.
    # Only a missing dependency should disable scribe support.
    WITH_SCRIBE = False

# Module-level logger; `get_logger` is presumably imported from
# dpark.utils.log in the part of the file not shown here — TODO confirm.
logger = get_logger(__name__)


class Interval(object):
    def __init__(self, beginTime, endTime):
        """Record the start and end timestamps of this interval."""
        self.end = endTime
        self.begin = beginTime

    @property
    def duration(self):
        """Length of the interval, i.e. ``end - begin``."""
        span = self.end - self.begin
        return span

    def __add__(self, d):
        """Return a new Interval shifted forward by *d* (both endpoints)."""
        shifted_begin = self.begin + d
        shifted_end = self.end + d
        return Interval(shifted_begin, shifted_end)

    def __sub__(self, d):
Example #2
0
File: cache.py  Project: rohithreddy/dpark
from six.moves import cPickle
import shutil
import struct

import msgpack

from dpark.env import env
from dpark.serialize import marshalable
from dpark.utils import mkdir_p, atomic_file
from dpark.utils.log import get_logger
from dpark.tracker import GetValueMessage, AddItemMessage, RemoveItemMessage
from six.moves import map
from six.moves import range
from six.moves import urllib

# Module-level logger named after this module (get_logger imported above
# from dpark.utils.log).
logger = get_logger(__name__)


class Cache:
    data = {}

    def get(self, key):
        """Return the cached value stored under *key*, or None if absent."""
        try:
            return self.data[key]
        except KeyError:
            return None

    def put(self, key, value, is_iterator=False):
        if value is not None:
            if is_iterator:
                value = list(value)
            self.data[key] = value
            return value
        else:
Example #3
0
File: executor.py  Project: posens/dpark
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from dpark.utils import (compress, decompress, spawn, mkdir_p)
from dpark.utils.log import get_logger, init_dpark_logger, formatter_message
from dpark.utils.memory import ERROR_TASK_OOM, set_oom_score
from dpark.serialize import marshalable
from dpark.accumulator import Accumulator
from dpark.schedule import Success, FetchFailed, OtherFailure
from dpark.env import env
from dpark.shuffle import LocalFileShuffle
from dpark.mutable_dict import MutableDict
from dpark.serialize import loads
from dpark.task import TTID
from dpark.utils.debug import spawn_rconsole

# Executor-wide logger (get_logger imported above from dpark.utils.log).
logger = get_logger('dpark.executor')

# 256 KiB cap; presumably the max size of a task result sent back inline
# rather than via the file system — TODO confirm against scheduler side.
TASK_RESULT_LIMIT = 1024 * 256
# Default port for the executor's web endpoint — verify against callers.
DEFAULT_WEB_PORT = 5055
MAX_EXECUTOR_IDLE_TIME = 60 * 60 * 24  # 1 day
KILL_TIMEOUT = 0.1  # 0.1 sec, to reply to mesos fast
# Timeouts (seconds) for handling lost tasks — semantics not visible here.
TASK_LOST_JOIN_TIMEOUT = 3
TASK_LOST_DISCARD_TIMEOUT = 60
# Filled in elsewhere (empty by default); NOTE(review): looks like it holds
# the user script source — confirm where it is assigned.
Script = ''


def setproctitle(x):
    try:
        from setproctitle import setproctitle as _setproctitle
        _setproctitle(x)
    except ImportError:
Example #4
0
 def setLogLevel(level):
     """Set the threshold of the top-level 'dpark' logger to *level*."""
     dpark_logger = get_logger('dpark')
     dpark_logger.setLevel(level)
Example #5
0
File: executor.py  Project: douban/dpark
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from dpark.utils import (
    compress, decompress, spawn, mkdir_p, DparkUserFatalError
)
from dpark.utils.log import get_logger, init_dpark_logger, formatter_message
from dpark.utils.memory import ERROR_TASK_OOM, set_oom_score
from dpark.serialize import marshalable
from dpark.accumulator import Accumulator
from dpark.env import env
from dpark.mutable_dict import MutableDict
from dpark.serialize import loads
from dpark.task import TTID, TaskState, TaskEndReason, FetchFailed
from dpark.utils.debug import spawn_rconsole
from dpark.shuffle import ShuffleWorkDir

# Logger shared by the whole executor module (see dpark.utils.log import above).
logger = get_logger('dpark.executor')

# 256 KiB — presumably the largest task result returned inline; TODO confirm.
TASK_RESULT_LIMIT = 1024 * 256
# Default web/debug port for the executor — verify against callers.
DEFAULT_WEB_PORT = 5055
MAX_EXECUTOR_IDLE_TIME = 60 * 60 * 24  # 1 day
KILL_TIMEOUT = 0.1  # 0.1 sec, to reply to mesos fast
# Lost-task handling timeouts in seconds; exact semantics not visible here.
TASK_LOST_JOIN_TIMEOUT = 3
TASK_LOST_DISCARD_TIMEOUT = 60
# Assigned elsewhere; NOTE(review): appears to hold the user script — confirm.
Script = ''


def setproctitle(x):
    try:
        from setproctitle import setproctitle as _setproctitle
        _setproctitle(x)
    except ImportError:
Example #6
0
File: context.py  Project: douban/dpark
 def setLogLevel(level):
     """Adjust how verbose dpark's logging is.

     Delegates to the shared 'dpark' logger obtained via get_logger.
     """
     root = get_logger('dpark')
     root.setLevel(level)