Code example #1
 def test_partial(self):
     big = b"0123456789" * 10000
     data = [{b'full': 1, b'data': big}, {b'partial': 2, b'data': big}]
     cache = MockCache()
     key = PlaintextKey(None)
     chunks = CacheChunkBuffer(cache, key, None)
     for d in data:
         chunks.add(d)
     chunks.flush(flush=False)
     # the code is expected to leave the last partial chunk in the buffer
     self.assert_equal(len(chunks.chunks), 3)
     self.assert_true(chunks.buffer.tell() > 0)
     # now really flush
     chunks.flush(flush=True)
     self.assert_equal(len(chunks.chunks), 4)
     self.assert_true(chunks.buffer.tell() == 0)
     unpacker = msgpack.Unpacker()
     for id in chunks.chunks:
         unpacker.feed(cache.objects[id])
     self.assert_equal(data, list(unpacker))
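The test above exercises msgpack's streaming API: objects are packed one at a time, the bytes are fed to an Unpacker, and iteration yields complete objects as they arrive. A minimal standalone sketch of that round trip (assuming msgpack >= 1.0 defaults, not borg's own helpers):

import msgpack

records = [{b'full': 1}, {b'partial': 2}]
unpacker = msgpack.Unpacker(use_list=False)
for rec in records:
    unpacker.feed(msgpack.packb(rec))
assert list(unpacker) == records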
Code example #2
    def __init__(self, vim: Nvim) -> None:
        self.name = 'child'

        self._vim = vim
        self._filters: typing.Dict[str, typing.Any] = {}
        self._sources: typing.Dict[str, typing.Any] = {}
        self._profile_flag = None
        self._profile_start_time = 0
        self._loaded_sources: typing.Dict[str, typing.Any] = {}
        self._loaded_filters: typing.Dict[str, typing.Any] = {}
        self._source_errors: typing.Dict[str, int] = defaultdict(int)
        self._prev_results: typing.Dict[str, Result] = {}
        self._unpacker = msgpack.Unpacker(
            encoding='utf-8',
            unicode_errors='surrogateescape')
        self._packer = msgpack.Packer(
            use_bin_type=True,
            encoding='utf-8',
            unicode_errors='surrogateescape')
        self._ignore_sources: typing.List[typing.Any] = []
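Both constructors above use the encoding/unicode_errors keywords, which were deprecated in msgpack 0.5 and removed in msgpack 1.0. A rough modern equivalent, offered as an assumption about intent rather than a drop-in patch for deoplete:

import msgpack

unpacker = msgpack.Unpacker(raw=False, unicode_errors='surrogateescape')
packer = msgpack.Packer(use_bin_type=True, unicode_errors='surrogateescape')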
Code example #3
File: archive.py Project: dannyedel/borg
 def delete(self, stats, progress=False):
     unpacker = msgpack.Unpacker(use_list=False)
     items_ids = self.metadata[b'items']
     pi = ProgressIndicatorPercent(total=len(items_ids),
                                   msg="Decrementing references %3.0f%%",
                                   same_line=True)
     for (i, (items_id, data)) in enumerate(
             zip(items_ids, self.repository.get_many(items_ids))):
         if progress:
             pi.show(i)
         unpacker.feed(self.key.decrypt(items_id, data))
         self.cache.chunk_decref(items_id, stats)
         for item in unpacker:
             if b'chunks' in item:
                 for chunk_id, size, csize in item[b'chunks']:
                     self.cache.chunk_decref(chunk_id, stats)
     if progress:
         pi.finish()
     self.cache.chunk_decref(self.id, stats)
     del self.manifest.archives[self.name]
Code example #4
def test_logging_transaction():
    out, pid = helper.proc.run(['-o', '-'])
    bs = io.BytesIO(out)

    # Get first message from iterator.
    msg_iter = msgpack.Unpacker(bs)
    msg1 = next(msg_iter)

    
    assert msg1[0] == b'dns-gazer.dns.tx'
    assert msg1[1] == 1444531212
    
    o1 = msg1[2]
    assert o1[b'client_addr'] == b'10.139.96.169'
    assert o1[b'server_addr'] == b'210.196.3.183'
    assert o1[b'server_port'] == 53
    
    assert len(o1[b'query']) == 1
    assert o1[b'query'][0][b'name'] == b'bf-pro-front.cloudapp.net.'
    assert o1[b'query'][0][b'type'] == b'A'
    assert o1[b'query'][0][b'section'] == b'question'

    assert len(o1[b'reply']) == 2
    assert o1[b'reply'][1][b'name'] == b'bf-pro-front.cloudapp.net.'
    assert o1[b'reply'][1][b'type'] == b'A'
    assert o1[b'reply'][1][b'data'] == b'23.100.102.231'
    assert o1[b'reply'][1][b'section'] == b'answer'

    
    msg2 = next(msg_iter)
    o2 = msg2[2]
    assert len(o2[b'query']) == 1
    assert o2[b'query'][0][b'name'] == b'news.nicovideo.jp.'
    assert o2[b'query'][0][b'type'] == b'A'
    assert o2[b'query'][0][b'section'] == b'question'

    assert len(o2[b'reply']) == 2
    assert o2[b'reply'][1][b'name'] == b'news.nicovideo.jp.'
    assert o2[b'reply'][1][b'type'] == b'A'
    assert o2[b'reply'][1][b'data'] == b'202.248.252.190'
    assert o2[b'reply'][1][b'section'] == b'answer'
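For reference, an Unpacker wrapping a file-like object such as the BytesIO above is itself an iterator, so next() walks the stream one message at a time. A self-contained sketch (the topic and timestamp values are illustrative only):

import io
import msgpack

buf = io.BytesIO(msgpack.packb([b'topic', 1444531212, {b'k': 1}]))
msg = next(msgpack.Unpacker(buf))
assert msg == [b'topic', 1444531212, {b'k': 1}]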
Code example #5
File: mmp_cds_base.py Project: JeffRoy/mi-dataset
    def parse_file(self):
        """
        This method parses each chunk and attempts to extract samples to return.
        @return for each discovered sample, a list of tuples containing each
                particle and its associated state position information
        """

        # We need to put the following in a try block just in case the data provided is malformed
        try:
            # Let's iterate through each unpacked list item
            for unpacked_data in msgpack.Unpacker(self._stream_handle):

                # The expectation is that an unpacked list item associated with a McLane Moored Profiler cabled
                # docking station data chunk consists of a list of three items
                # isinstance accepts a tuple of types; grouping it this way
                # also avoids the original or/and precedence trap.
                if isinstance(unpacked_data, (tuple, list)) and \
                        len(unpacked_data) == NUM_MMP_CDS_UNPACKED_ITEMS:

                    # Extract the sample and provide the particle class,
                    # which could be different for each derived MmpCdsParser

                    try:
                        data_particle = self._extract_sample(
                            self._particle_class, None, unpacked_data, None)
                        self._record_buffer.append(data_particle)
                    except SampleException:
                        log.debug(UNEXPECTED_UNPACKED_MSGPACK_FORMAT_MSG)
                        self._exception_callback(
                            SampleException(
                                UNEXPECTED_UNPACKED_MSGPACK_FORMAT_MSG))

                else:
                    log.debug(UNEXPECTED_UNPACKED_MSGPACK_FORMAT_MSG)
                    self._exception_callback(
                        SampleException(
                            UNEXPECTED_UNPACKED_MSGPACK_FORMAT_MSG))

        except TypeError:
            log.warn(UNABLE_TO_ITERATE_THROUGH_UNPACKED_MSGPACK_MSG)
            self._exception_callback(
                SampleException(
                    UNABLE_TO_ITERATE_THROUGH_UNPACKED_MSGPACK_MSG))
Code example #6
File: msgpack.py Project: thpatel/synapse
def iterfd(fd):
    '''
    Generator which unpacks a file object of msgpacked content.

    Args:
        fd: File object to consume data from.

    Notes:
        String objects are decoded using utf8 encoding.  In order to handle
        potentially malformed input, ``unicode_errors='surrogatepass'`` is set
        to allow decoding bad input strings.

    Yields:
        Objects from a msgpack stream.
    '''
    unpk = msgpack.Unpacker(fd,
                            use_list=False,
                            encoding='utf8',
                            unicode_errors='surrogatepass')
    for mesg in unpk:
        yield mesg
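A hypothetical usage of iterfd(), streaming objects out of a msgpack file (the filename is illustrative):

with open('data.mpk', 'rb') as fd:
    for mesg in iterfd(fd):
        print(mesg)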
Code example #7
    def __init__(self, name):
        self.name = name
        self.io = io.open(name, 'rb')
        self.inprogress = (self._inprogress == name[-len(self._inprogress):])
        self.upck = msgpack.Unpacker()
        while True:
            line = self.io.readline().strip()
            if line in self._formats:
                self.format = line
            elif line.startswith('0'):
                self.format_version = line
            elif line.startswith('Server: '):
                self.server_uuid = line[8:]
            elif line.startswith('VClock: '):
                self.vclock = line[8:]
            elif not line:
                break
            else:
                raise LogReaderError('')

        self.log_begin = self.io.tell()
Code example #8
File: chorus.py Project: Iziren/wade
    def _outgoing_connect_cb(self, peer_id, tcp_handle, error):
        """Called on attempt to make outgoing connection to a peer."""

        if error is not None:
            self._logger.error(
                "unable to establish connction to peer %d",
                peer_id,
            )
            del self._outgoing[peer_id]
            return

        self._outgoing[peer_id] = Outgoing(
            tcp_handle,
            msgpack.Packer(),
            msgpack.Unpacker(),
            self._conf[peer_id],
            {},
        )

        tcp_handle.start_read(partial(self._outgoing_read_cb, peer_id))
        self._logger.info("connect to peer %d", peer_id)
Code example #9
File: socket.py Project: dwinings/synapse
    def __init__(self, sock, **info):
        EventBus.__init__(self)

        self.sock = sock  # type: socket.socket
        self.plex = None
        self.unpk = msgpack.Unpacker(use_list=0, encoding='utf8')
        self.iden = s_common.guid()
        self.xforms = []        # list of SockXform instances
        self.info = info

        # used by Plex() tx
        self.txbuf = None
        self.txsize = 0

        if self.info.get('nodelay', True):
            self._tryTcpNoDelay()

        self.txque = collections.deque()
        self.rxque = collections.deque()

        self.onfini(self._finiSocket)
Code example #10
    def __init__(self,
                 request,
                 client_address,
                 server,
                 controllers_prefix,
                 timeout_receive=5,
                 logger=None):

        self.controllers_prefix = controllers_prefix
        self.packer = msgpack.Packer(default=lambda x: x.to_msgpack())
        self.unpacker = msgpack.Unpacker()
        self.response = Response()
        self.timeout_receive = timeout_receive
        self.time_start = None

        if logger is None:
            self.logger = Logger.get_logger()
        else:
            self.logger = logger

        super(Handler, self).__init__(request, client_address, server)
Code example #11
File: frame.py Project: binref/refinery-test
 def __init__(self, stream: Optional[BinaryIO]):
     import msgpack
     self.finished = False
     self.trunk = ()
     self._next = Chunk(bytearray(), ())
     buffer = stream and stream.read(len(MAGIC)) or B''
     if buffer == MAGIC:
         self.framed = True
         self.stream = stream
         self.unpacker = msgpack.Unpacker(
             max_buffer_size=0xFFFFFFFF,
             use_list=False
         )
         self._advance()
         self.gauge = len(self._next.path)
     else:
         self.framed = False
         self.gauge = 0
         while buffer:
             self._next.extend(buffer)
             buffer = stream.read()
Code example #12
File: client.py Project: drunkdream/pytmate
 async def serve(self, wait_timeout=None, timeout=None):
     time0 = time.time()
     # One Unpacker for the whole session: feed() buffers partial input, so a
     # message split across 4096-byte reads is reassembled instead of dropped.
     unpacker = msgpack.Unpacker()
     while timeout is None or time.time() - time0 < timeout:
         if not self._shell and wait_timeout and time.time() - time0 >= wait_timeout:
             utils.logger.warn(
                 "[%s] Wait for client timeout" % self.__class__.__name__
             )
             return
         try:
             buffer = await asyncio.wait_for(
                 self._channel_reader.read(4096), timeout=1
             )
         except asyncio.TimeoutError:
             continue
         except asyncssh.ConnectionLost:
             break
         unpacker.feed(buffer)
         for msg in unpacker:
             self.process_message_in(msg)
     utils.logger.info("[%s] TMate client exit" % self.__class__.__name__)
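Why hoisting the Unpacker matters: feed() retains partial input between calls, so a message split across two reads is only reassembled if the same Unpacker sees both chunks. A runnable sketch of that behavior:

import msgpack

packed = msgpack.packb(list(range(100)))
chunks = [packed[:10], packed[10:]]    # one message split across two "reads"
unpacker = msgpack.Unpacker()
for chunk in chunks:
    unpacker.feed(chunk)
    for msg in unpacker:
        print(msg)                     # fires once, after the second chunk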
Code example #13
    def setup():
        g.msg_packer = msgpack.Packer()
        g.msg_unpacker = msgpack.Unpacker()

        g.authed = False

        # Catch logged in users
        if "email" in session:
            user = db.users.find_one({"email": session["email"]})
            if user is not None:
                g.user = user
                g.authed = True
        elif ("api_key" in request.args and "signature" in request.args
              and "expires" in request.args):
            user = db.users.find_one({"api_key": request.args["api_key"]})
            if user is None:
                return
            if check_request(request, user["secret"]):
                g.user = user
                g.authed = True
                session["email"] = user["email"]
Code example #14
File: process.py Project: ccope/deoplete.nvim
 def __init__(self, commands, context, cwd):
     startupinfo = None
     if os.name == 'nt':
         startupinfo = subprocess.STARTUPINFO()
         startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
     self._proc = subprocess.Popen(commands,
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   startupinfo=startupinfo,
                                   cwd=cwd)
     self._eof = False
     self._context = context
     self._packer = msgpack.Packer(use_bin_type=True,
                                   encoding='utf-8',
                                   unicode_errors='surrogateescape')
     self._unpacker = msgpack.Unpacker(encoding='utf-8',
                                       unicode_errors='surrogateescape')
     self._queue_out = Queue()
     self._thread = Thread(target=self.enqueue_output)
     self._thread.start()
Code example #15
    def _load(self):
        for path in self.paths:
            for filename in glob.glob(path):
                mtime = os.path.getmtime(filename)
                if mtime == self._filename_to_mtime.get(filename):
                    # This file has not changed since last time we loaded it.
                    continue
                self._filename_to_mtime[filename] = mtime
                with open(filename, 'rb') as file:
                    unpacker = msgpack.Unpacker(file, **UNPACK_OPTIONS)
                    try:
                        name, run_start_doc = next(unpacker)
                    except StopIteration:
                        # Empty file, maybe being written to currently
                        continue

                def gen(filename=filename):
                    # Bind the current filename now; a plain closure would see
                    # the loop variable's final value when the generator is
                    # consumed later.
                    with open(filename, 'rb') as file:
                        yield from msgpack.Unpacker(file, **UNPACK_OPTIONS)

                self.upsert(gen, (), {})
Code example #16
    def __init__(self, log_file):
        self.log_file = Path(log_file)

        with open(self.log_file, 'rb') as infile:
            raw_log = list(msgpack.Unpacker(infile, encoding='utf-8'))
            self.seek_pos = infile.tell()

        self.logs = recursive_default_dict()
        self.hparams = None
        self.notes = []

        self.split = 'none'
        self.current_batch_count = 0
        self.batch_counter = -1
        self.epoch_starts = []

        self.safe_state = True
        self.safe_count = 0

        for entry in raw_log:
            self._process_entry(entry)
Code example #17
    def decode(self, value):
        self.read_only = self.__class__.read_only
        unpacked = ''
        error = ''

        try:
            unpacked = msgpack.unpackb(value, raw=False, timestamp=3)
        except msgpack.ExtraData as e:
            self.read_only = True

            buf = io.BytesIO(value)
            unpacker = msgpack.Unpacker(buf, raw=False)
            for data in unpacker:
                unpacked = data
                error = f'First object from the stream is shown, ' \
                        f'value was truncated by {len(e.extra)} bytes.'

        return {
            'output': json.dumps(unpacked, default=self.default),
            'error': error
        }
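The ExtraData path above exists because unpackb() requires the buffer to hold exactly one object, while an Unpacker tolerates a stream of them. A compact sketch of both behaviors:

import io
import msgpack

blob = msgpack.packb(1) + msgpack.packb(2)
try:
    msgpack.unpackb(blob)
except msgpack.ExtraData as e:
    assert e.unpacked == 1 and e.extra == msgpack.packb(2)
assert list(msgpack.Unpacker(io.BytesIO(blob))) == [1, 2]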
Code example #18
File: child.py Project: niaralab/dotfaster
    def __init__(self, vim):
        self.name = 'child'

        self._vim = vim
        self._filters = {}
        self._sources = {}
        self._custom = []
        self._profile_flag = None
        self._profile_start_time = 0
        self._loaded_sources = {}
        self._loaded_filters = {}
        self._source_errors = defaultdict(int)
        self._prev_results = {}
        self._unpacker = msgpack.Unpacker(
            encoding='utf-8',
            unicode_errors='surrogateescape')
        self._packer = msgpack.Packer(
            use_bin_type=True,
            encoding='utf-8',
            unicode_errors='surrogateescape')
        self._ignore_sources = []
Code example #19
    def _yield_blocks_from_file(self, file_path, direction, start=None):
        assert direction in (1, -1)
        storage = self.block_storage

        unpacker = msgpack.Unpacker()
        unpacker.feed(storage.load(file_path))
        if direction == -1:
            unpacker = always_reversible(unpacker)

        for block_compact_dict in unpacker:
            block = Block.from_compact_dict(block_compact_dict)
            block_number = block.message.block_number
            # TODO(dmu) HIGH: Implement a better skip
            if start is not None:
                if direction == 1 and block_number < start:
                    continue
                elif direction == -1 and block_number > start:
                    continue

            self.blocks_cache[block_number] = block
            yield block
Code example #20
def main():

    ifnames = "/home/parantapa/data/reddit/reddit_post/*"
    median_name = './median_comments.txt'
    comment_name = "./post_common_number.json"
    ofname = "./final_sub_posts.json"

    median_data = {}
    with open(median_name) as fobj:
        for line in fobj:
            if len(line) > 1:
                cols = line.split()
                median_data[cols[0]] = int(cols[1])

    comment_number = {}
    with open(comment_name) as fobj:
        for line in fobj:
            line = json.loads(line)
            comment_number[line[0]] = int(line[1])

    subrddit_data = defaultdict(list)
    for ifname in glob(ifnames):
        with gzip.open(ifname) as fobj:
            unpacker = msgpack.Unpacker(fobj, encoding="utf-8")
            for post in unpacker:
                id, _, sub, _, _, _, _, _, _ = post

                if sub in median_data:
                    ncoms = comment_number.get(id, 0)
                    # print 'sub=',sub, ' id= ',id
                    if ncoms >= median_data[sub]:
                        subrddit_data[sub].append(id)

    for sub, link_ids in subrddit_data.items():
        if len(link_ids) > 10:
            link_ids = random.sample(link_ids, 10)
        subrddit_data[sub] = link_ids

    with open(ofname, 'w') as fobj:
        json.dump(subrddit_data, fobj)
Code example #21
    def _do_call(self, func_name, args, kwargs):
        with self.lock:
            if not self.sock:
                try:
                    connected = False
                    with Timeout(self.timeout):
                        s = socket.socket()
                        s.connect(self.addr)
                        connected = True

                    if not connected:
                        raise RPCError("Connection time out!")

                except socket.error:
                    raise RPCError("can't connect!")

                s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                s.read = s.recv
                s.write = s.send
                self.unpacker = msgpack.Unpacker(s)
                self.sock = s

            try:
                msgpack.pack([REQUEST, func_name, args, kwargs], self.sock)
                resp = self.unpacker.unpack()
            except:
                swallow(self.sock.close)()
                self.sock = self.unpacker = None
                raise RPCServerGone()

            if resp[0] == RESPONSE:
                # resp == [RESPONSE, result]
                return resp[1]
            elif resp[0] == EXCEPTION:
                # resp == [EXCEPTION, name, traceback_text]
                exc = RPCError('Remote exception: %s' % resp[1])
                exc.traceback_text = resp[2]
                raise exc

            raise RPCError('Wrong protocol!')
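A note on the s.read = s.recv / s.write = s.send shim above: msgpack.pack() pushes bytes via stream.write() and Unpacker pulls via stream.read(), so any object with those two methods works. socket.send() may transmit only part of a large buffer, though, so sock.makefile() is the safer way to get a real stream (a suggested alternative, not the project's own code):

import socket

sock = socket.socket()
# sock.connect(addr) would go here
stream = sock.makefile('rwb')
# msgpack.pack(request, stream); stream.flush()
# unpacker = msgpack.Unpacker(stream)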
Code example #22
File: fuse.py Project: yarwelp/borg
 def process_archive(self, archive, prefix=[]):
     """Build fuse inode hierarchy from archive metadata
     """
     unpacker = msgpack.Unpacker()
     for key, chunk in zip(archive.metadata[b'items'], self.repository.get_many(archive.metadata[b'items'])):
         _, data = self.key.decrypt(key, chunk)
         unpacker.feed(data)
         for item in unpacker:
             item = Item(internal_dict=item)
             segments = prefix + os.fsencode(os.path.normpath(item.path)).split(b'/')
             del item.path
             num_segments = len(segments)
             parent = 1
             for i, segment in enumerate(segments, 1):
                 # Insert a default root inode if needed
                 if self._inode_count == 0 and segment:
                     archive_inode = self.allocate_inode()
                     self.items[archive_inode] = self.default_dir
                     self.parent[archive_inode] = parent
                 # Leaf segment?
                 if i == num_segments:
                     if 'source' in item and stat.S_ISREG(item.mode):
                         inode = self._find_inode(item.source, prefix)
                         item = self.cache.get(inode)
                         item.nlink = item.get('nlink', 1) + 1
                         self.items[inode] = item
                     else:
                         inode = self.cache.add(item)
                     self.parent[inode] = parent
                     if segment:
                         self.contents[parent][segment] = inode
                 elif segment in self.contents[parent]:
                     parent = self.contents[parent][segment]
                 else:
                     inode = self.allocate_inode()
                     self.items[inode] = self.default_dir
                     self.parent[inode] = parent
                     if segment:
                         self.contents[parent][segment] = inode
                     parent = inode
Code example #23
File: mpf.py Project: CivicSpleen/ambry_sources
    def __init__(self, parent, fh, compress=True):
        """Reads the file_header and prepares for iterating over rows"""

        self.parent = parent
        self._fh = fh
        self._compress = compress
        self._headers = None
        self.data_start = 0
        self.meta_start = 0
        self.data_start_row = 0
        self.data_end_row = 0

        self.pos = 0  # Row position for next read, starts at 1, since header is always 0

        self.n_rows = 0
        self.n_cols = 0

        self._in_iteration = False

        MPRowsFile.read_file_header(self, self._fh)

        try:
            self.data_start = int(self._fh.tell())

            assert self.data_start == self.FILE_HEADER_FORMAT_SIZE
        except AttributeError:
            # The pyfs HTTP filesystem doesn't have tell()
            self.data_start = self.FILE_HEADER_FORMAT_SIZE

        if self._compress:
            self._zfh = GzipFile(fileobj=self._fh, end_of_data=self.meta_start)
        else:
            self._zfh = self._fh

        self.unpacker = msgpack.Unpacker(self._zfh,
                                         object_hook=MPRowsFile.decode_obj,
                                         use_list=False,
                                         encoding='utf-8')

        self._meta = None
Code example #24
    async def _reader(self, scope):
        """Main loop for reading

        TODO: add a timeout for receiving message bodies.
        """
        unpacker = msgpack.Unpacker(object_hook=self._decode_addr_key)
        cur_msg = None

        async with anyio.open_cancel_scope(shield=True) as s:
            await scope.set(s)

            try:
                while self._socket is not None:
                    if cur_msg is not None:
                        logger.debug("%d:wait for body", self._conn_id)
                    try:
                        buf = await self._socket.receive_some(
                            self._socket_recv_size)
                    except ClosedResourceError:
                        return  # closed by us
                    if len(buf) == 0:  # Connection was closed.
                        raise SerfClosedError("Connection closed by peer")
                    unpacker.feed(buf)

                    for msg in unpacker:
                        if cur_msg is not None:
                            logger.debug("%d  Body=%s", self._conn_id, msg)
                            cur_msg.body = msg
                            await self._handle_msg(cur_msg)
                            cur_msg = None
                        else:
                            logger.debug("%d:Recv =%s", self._conn_id, msg)
                            msg = SerfResult(msg)
                            if await self._handle_msg(msg):
                                cur_msg = msg
            finally:
                hdl, self._handlers = self._handlers, None
                async with anyio.open_cancel_scope(shield=True):
                    for m in hdl.values():
                        await m.cancel()
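The object_hook seen above is msgpack's map-decoding hook: it receives each fully decoded map, and its return value replaces that map in the result. A small sketch with a hypothetical hook standing in for _decode_addr_key:

import msgpack

def decode_keys(obj):
    # Decode bytes keys to str; leave values untouched.
    return {k.decode() if isinstance(k, bytes) else k: v
            for k, v in obj.items()}

unpacker = msgpack.Unpacker(object_hook=decode_keys)
unpacker.feed(msgpack.packb({b'Addr': b'\x7f\x00\x00\x01'}))
assert next(unpacker) == {'Addr': b'\x7f\x00\x00\x01'}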
Code example #25
File: remote.py Project: hendrikvanantwerpen/borg
    def __init__(self, location, create=False):
        self.location = location
        self.preload_ids = []
        self.msgid = 0
        self.to_send = b''
        self.cache = {}
        self.ignore_responses = set()
        self.responses = {}
        self.unpacker = msgpack.Unpacker(use_list=False)
        self.p = None
        # XXX: ideally, the testsuite would subclass Repository and
        # override ssh_cmd() instead of this crude hack, although
        # __testsuite__ is not a valid domain name so this is pretty
        # safe.
        if location.host == '__testsuite__':
            args = [sys.executable, '-m', 'borg.archiver', 'serve'
                    ] + self.extra_test_args
        else:  # pragma: no cover
            args = self.ssh_cmd(location)
        self.p = Popen(args, bufsize=0, stdin=PIPE, stdout=PIPE)
        self.stdin_fd = self.p.stdin.fileno()
        self.stdout_fd = self.p.stdout.fileno()
        fcntl.fcntl(self.stdin_fd, fcntl.F_SETFL,
                    fcntl.fcntl(self.stdin_fd, fcntl.F_GETFL) | os.O_NONBLOCK)
        fcntl.fcntl(self.stdout_fd, fcntl.F_SETFL,
                    fcntl.fcntl(self.stdout_fd, fcntl.F_GETFL) | os.O_NONBLOCK)
        self.r_fds = [self.stdout_fd]
        self.x_fds = [self.stdin_fd, self.stdout_fd]

        try:
            version = self.call('negotiate', 1)
        except ConnectionClosed:
            raise Exception(
                'Server immediately closed connection - is Borg installed and working on the server?'
            )
        if version != 1:
            raise Exception(
                'Server insisted on using unsupported protocol version %d' %
                version)
        self.id = self.call('open', location.path, create)
Code example #26
File: minidump.py Project: luckylk/sentry
def merge_attached_breadcrumbs(mpack_breadcrumbs, data):
    # Merge msgpack breadcrumb file.
    if mpack_breadcrumbs.size > MAX_MSGPACK_BREADCRUMB_SIZE_BYTES:
        return

    try:
        unpacker = msgpack.Unpacker(mpack_breadcrumbs)
        breadcrumbs = list(unpacker)
    except (UnpackException, ExtraData) as e:
        minidumps_logger.exception(e)
        return

    if not breadcrumbs:
        return

    current_crumbs = data.get('breadcrumbs')
    if not current_crumbs:
        data['breadcrumbs'] = breadcrumbs
        return

    current_crumb = next(
        (c for c in reversed(current_crumbs)
         if isinstance(c, dict) and c.get('timestamp') is not None), None)
    new_crumb = next(
        (c for c in reversed(breadcrumbs)
         if isinstance(c, dict) and c.get('timestamp') is not None), None)

    # cap the breadcrumbs to the highest count of either file
    cap = max(len(current_crumbs), len(breadcrumbs))

    if current_crumb is not None and new_crumb is not None:
        if dp.parse(current_crumb['timestamp']) > dp.parse(
                new_crumb['timestamp']):
            data['breadcrumbs'] = breadcrumbs + current_crumbs
        else:
            data['breadcrumbs'] = current_crumbs + breadcrumbs
    else:
        data['breadcrumbs'] = current_crumbs + breadcrumbs

    data['breadcrumbs'] = data['breadcrumbs'][-cap:]
Code example #27
    def handle_stream(self, stream, address):
        '''
        Handle incoming streams and add messages to the incoming queue
        '''
        log.trace('Req client {0} connected'.format(address))
        self.clients.append((stream, address))
        unpacker = msgpack.Unpacker()
        try:
            while True:
                wire_bytes = yield stream.read_bytes(4096, partial=True)
                unpacker.feed(wire_bytes)
                for framed_msg in unpacker:
                    header = framed_msg['head']
                    self.io_loop.spawn_callback(self.message_handler, stream, header, framed_msg['body'])

        except tornado.iostream.StreamClosedError:
            log.trace('req client disconnected {0}'.format(address))
            self.clients.remove((stream, address))
        except Exception as e:
            # e.extra / e.__module__ are not reliable attributes on arbitrary
            # exceptions; log the exception itself instead.
            log.trace('other master-side exception: %s', e)
            self.clients.remove((stream, address))
            stream.close()
Code example #28
def insert_tags(conn):
    objs = []
    unpacker = msgpack.Unpacker()
    for buff in stdin:
        unpacker.feed(buff)
        for o in unpacker:
            objs.append(o)

        if len(objs) > 3600:
            conn.executemany(
                'INSERT INTO readings'
                ' (equipment, channel, value)'
                ' VALUES (:eqid, :tag, :value);', objs)
            conn.commit()
            objs = []

    if len(objs) > 0:
        conn.executemany(
            'INSERT INTO readings'
            ' (equipment, channel, value)'
            ' VALUES (:eqid, :tag, :value);', objs)
        conn.commit()
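One detail worth flagging in insert_tags(): if stdin here is Python 3's sys.stdin, line iteration yields str, which feed() rejects. Reading fixed-size blocks from the binary layer is the more robust pattern (a sketch under that assumption, with the SQL batching elided):

import sys
import msgpack

unpacker = msgpack.Unpacker()
while True:
    block = sys.stdin.buffer.read(4096)
    if not block:
        break
    unpacker.feed(block)
    for obj in unpacker:
        pass  # accumulate obj for the batched INSERT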
Code example #29
File: base.py Project: emin63/oxtie
    def packed_to_hdr_and_body(cls, packed):
        """Unpack the header and body from a packed serialized representation.

        :param packed:     The packed serialized representation.

        :return:  The pair (hdr, body) where hdr is a dict containing
                  header information about the stored item and body is
                  the (still serialized) body.

        PURPOSE:  Unpack the header and get the body. You will probably
                  still need to deserialize the body. This is a helper
                  method and not meant to be called directly.

        """
        dummy = cls
        unpacker = msgpack.Unpacker(packed, encoding='utf-8')
        hdr, body = unpacker
        return hdr, body
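Note that msgpack.Unpacker's first argument is a file-like object with a read() method, not a bytes value, so a raw packed buffer is usually wrapped in BytesIO or passed through feed(); the hdr, body unpacking also assumes the stream holds exactly two objects. A sketch of the same two-object unpack under those assumptions, with raw=False in place of the removed encoding keyword:

import io
import msgpack

packed = msgpack.packb({'kind': 'hdr'}) + msgpack.packb(b'body')
hdr, body = msgpack.Unpacker(io.BytesIO(packed), raw=False)
assert hdr == {'kind': 'hdr'} and body == b'body'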
Code example #30
File: germaind.py Project: asaalmann/germain
 def load(self):
     """ Loads state from the log file. """
     if os.path.exists('germaind.msgpacks'):
         with open('germaind.msgpacks', 'rb') as f:  # binary mode: Unpacker needs bytes
             unpacker = msgpack.Unpacker(f)
             while True:
                 try:
                     d = unpacker.unpack()
                 except msgpack.OutOfData:
                     break
                 clientid, ts, bits, res = d
                 if bits not in self.results:
                     self.results[bits] = [0, 0]
                 N = 0
                 n = 0
                 for entry in res:
                     N += 1
                     if entry[0]:
                         n += 1
                 self.results[bits][0] += N
                 self.results[bits][1] += n
     self.f = open('germaind.msgpacks', 'ab')  # binary append to match the packed data
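The unpack()/OutOfData loop above is the manual form of simply iterating the Unpacker; both stop when the buffered data runs out. A minimal equivalent sketch:

import msgpack

unpacker = msgpack.Unpacker()
unpacker.feed(msgpack.packb([1, 2]) + msgpack.packb([3, 4]))
for d in unpacker:    # StopIteration plays the role of msgpack.OutOfData
    print(d)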