Example #1
File: protocol.py Project: zidz/bhindex
def decodeMessage(buf, msg_map=MSG_MAP):
    '''Decodes a single message from buffer
    @return (msg, bytesConsumed)
    @raises IndexError if buffer did not contain complete message
    '''
    id, newpos = decoder._DecodeVarint32(buf,0)
    size, newpos = decoder._DecodeVarint32(buf,newpos)
    id = id >> 3
    msgend = newpos+size
    if msgend > len(buf):
        raise IndexError('Incomplete message')
    msg = msg_map[id].message_type._concrete_class()
    msg.ParseFromString(buf[newpos:msgend])
    return msg, msgend
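For context, the inverse operation is straightforward. Below is a minimal sketch (not taken from bhindex) of how such a framed message could be written, assuming `_VarintBytes` from the protobuf internals and a hypothetical `msg_id` field number:

from google.protobuf.internal import encoder

def encodeMessage(msg, msg_id):
    '''Hypothetical counterpart to decodeMessage above: writes the field tag
    (msg_id shifted left by 3, wire type 2) and the payload length, both as
    varints, followed by the serialized payload.'''
    payload = msg.SerializeToString()
    tag = encoder._VarintBytes((msg_id << 3) | 2)
    length = encoder._VarintBytes(len(payload))
    return tag + length + payload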
Example #2
def get_delimited_message_bytes(byte_stream, nr=4):
	(length, pos) = decoder._DecodeVarint32(byte_stream.read(nr), 0)
	delimiter_bytes = nr - pos
	byte_stream.rewind(delimiter_bytes)
	message_bytes = byte_stream.read(length)
	total_len = length + pos
	return (total_len, message_bytes)
Example #3
File: fs_c.py Project: kumaraguruv/fs-c
 def read_message(self, m):
     self.extend_buffer(8)
     (data_size, data_size_len) = decoder._DecodeVarint32(buffer(self.buffer, self.offset), 0)
     self.clear_from_buffer(data_size_len)
     self.extend_buffer(data_size)
     m.ParseFromString(buffer(self.buffer, self.offset, data_size))
     self.clear_from_buffer(data_size)
Example #4
    def parse_metric_family(self, buf, content_type):
        """
        Gets the output data from a prometheus endpoint response along with its
        Content-type header and parses it into Prometheus classes (see [0])

        Parse the binary buffer in input, searching for Prometheus messages
        of type MetricFamily [0] delimited by a varint32 [1] when the
        content-type is a `application/vnd.google.protobuf`.

        [0] https://github.com/prometheus/client_model/blob/086fe7ca28bde6cec2acd5223423c1475a362858/metrics.proto#L76-%20%20L81
        [1] https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/AbstractMessageLite#writeDelimitedTo(java.io.OutputStream)
        """
        if 'application/vnd.google.protobuf' in content_type:
            n = 0
            while n < len(buf):
                msg_len, new_pos = _DecodeVarint32(buf, n)
                n = new_pos
                msg_buf = buf[n:n+msg_len]
                n += msg_len

                message = metrics_pb2.MetricFamily()
                message.ParseFromString(msg_buf)
                yield message
        elif 'text/plain' in content_type:
            messages = {}  # map with the name of the element (before the labels) and the list of occurrences with labels and values
            obj_map = {}   # map of the types of each metrics
            obj_help = {}  # help for the metrics
            for line in buf.splitlines():
                self._extract_metrics_from_string(line, messages, obj_map, obj_help)
            for _m in obj_map:
                if _m in messages or (obj_map[_m] == 'histogram' and '{}_bucket'.format(_m) in messages):
                    yield self._extract_metric_from_map(_m, messages, obj_map, obj_help)
        else:
            raise self.UnknownFormatError('Unsupported content-type provided: {}'.format(content_type))
Example #5
File: cloud_client.py Project: cgorbit/rem
def DecodeVarint32(stream):
    try:
        return _DecodeVarint32(StreamLikeBuffer(stream), 0)[0]
    except StreamLikeBuffer.IndexError as e:
        if e.index == 0:
            return None
        else:
            raise
Example #6
File: pb.py Project: oliverlee/phobos
def decode_delimited(message, data):
    length, position = _DecodeVarint32(data, 0)
    if (length + position) > len(data):
        raise MissingDataError
    if (length + position) < len(data):
        raise ExtraDataError

    message.ParseFromString(data[position:position + length])
    return message
Example #7
def get_message_contents(raw_data):
    """ returns the contents of a sized message as byte string.

        The function is similar to read_sized_message, except that we do not parse the message data

        A current limitation is that the crc value is not checked
    """
    (data_size, data_size_len) = decoder._DecodeVarint32(raw_data, 0)
    return raw_data[data_size_len:data_size_len+data_size]
Example #8
File: launcher.py Project: OhBaby/reef
 def read_next(self):
     bytes = []
     result = actor_pb2.ActorMessage()
     while True:
         bytes += self.__file.read(1)
         try:
             result.ParseFromString(self.__file.read(decoder._DecodeVarint32(bytes, 0)[0]))
             return result
         except IndexError:
             continue
Example #9
    def datagramReceived(self, data, addr):
        buf = None
        if data:
            length, length_len = decoder._DecodeVarint32(data, 0)
            # data is too big
            if length > (self.MAX_LENGTH - length_len):
                print "too much data - ignoring"
                return
            buf = data[length_len:]

        if buf:
            self.stringReceived(addr, buf)
Example #10
    def showResp(self):
        while self.sk == None:
            time.sleep(1)
                
        while True:
            buf = ''
            while True:
                tmpBuf = self.sk.recv(1)  # read the varint length prefix one byte at a time
                if len(tmpBuf) == 0:
                    raise Exception('close')
                buf += tmpBuf
                if ord(tmpBuf[0]) & 0x80 == 0:
                    break

            cmd_len = decoder._DecodeVarint32(buf, 0)[0]
            prefix_len = len(buf)
            print "cmd len", cmd_len
            while len(buf) < cmd_len + prefix_len:
                tmpBuf = self.sk.recv(cmd_len + prefix_len - len(buf))
                if len(tmpBuf) == 0:
                    raise Exception('close')
                buf += tmpBuf
            cmd = CommandMsg()
            cmd.ParseFromString(buf[prefix_len:])


            if cmd.type == CmdType.FIGHT_RESP:
                print "fight resp"
                self.showFightResp(cmd.content)
            elif cmd.type == CmdType.QUESTION:
                print "question"
                self.showQuestion(cmd.content)
            elif cmd.type == CmdType.FIGHT_RESULT:
                print "fight result"
                self.showResult(cmd.content)
            elif cmd.type == CmdType.FIGHT_REQ:
                print "fight req"
                self.showFightReq(cmd.content)
            elif cmd.type == CmdType.FETCH_PEER_LIST_RESP:
                print "fetch peer list resp"
                self.showFetchList(cmd.content)
            elif cmd.type == CmdType.FIGHT_STATE:
                print "fight state"
                self.showFightState(cmd.content)
            elif cmd.type == CmdType.UNKNOWN_OP:
                print "unkonwn op"
            elif cmd.type == CmdType.FIGHT_CANCEL:
                print "fight cancel"
            else:
                print "bad op"
Example #11
 def read_block(self):
     
     TO_READ = 4
     content = self.in_stream.read(TO_READ)
     total_size, offset = _DecodeVarint32(content, 0)
     content = content[offset:]
     
     consumed = TO_READ - offset
     to_read = total_size - consumed
     
     if to_read > 0:
         content += self.in_stream.read(to_read)
     
     self.buffer += uncompress(content)
Example #12
    def dataReceived(self, data):
        # ignoring data
        if self._over_length >= 0:
            if data:
                #print "got %d bytes of extra data" % len(data)
                self._over_length += len(data)
            # ignored data reached, keep the rest
            #print "ignoring %d/%d bytes" % (self._over_length, self._length)
            if self._over_length >= self._length:
                diff = (self._length - self._over_length)
                self._buf = data[diff:] if diff > 0 else ''
                # cancel incoming buffer since we already appended it
                data = None
                self._over_length = -1
                self._length = -1
                #print "ignore limit reached - keeping %d bytes" % len(self._buf)
            else:
                # do not continue with parsing
                return

        if data:
            self._buf += data
        #print "length-before:", len(self._buf)
        # no data received yet
        if self._length < 0 and self._buf:
            #print "parsing length..."
            length, length_len = decoder._DecodeVarint32(self._buf, 0)
            #print [length, length_len]
            self._length = length
            # remove length's length from buffer and continue reading
            self._buf = self._buf[length_len:]
            #print "length-after:", len(self._buf)

            # data is too big - drop buffer and ignore the rest
            if length > self.MAX_LENGTH:
                print "too much data - ignoring"
                self._over_length = len(self._buf)
                self._buf = ''
                return

        if self._length > 0 and len(self._buf) >= self._length and self._over_length < 0:
            #print "length %d reached (%d)" % (self._length, len(self._buf))
            out = self._buf[:self._length]
            #print "out data %d" % len(out)
            self.stringReceived(out)
            # don't forget the next pack :)
            self._buf = self._buf[self._length:]
            self._length = -1
            if self._buf:
                self.dataReceived(None)
Example #13
def load_label_feature(filename):
    features = []
    with open(filename, 'rb') as f:
        size = readVarint32(f)
        while size:
            read_bytes, _ = decoder._DecodeVarint32(size, 0)
            data = f.read(read_bytes)
            if len(data) < read_bytes:
                print ("Failed to load protobuf")
                break
            fea = feature_pb2.Feature()
            fea.ParseFromString(data)
            features.append(fea)
            size = readVarint32(f)
    return features
Example #14
def readDelimitedFrom_inplace(message, stream):
    raw_varint32 = readRawVarint32(stream)

    if raw_varint32:
        size, _ = decoder._DecodeVarint32(raw_varint32, 0)

        data = stream.read(size)
        if len(data) < size:
            raise Exception("Unexpected end of file")

        message.ParseFromString(data)

        return message
    else:
        return None
Example #15
    def dataReceived(self, data):
        '''!Twisted-API! When data arrives, append to buffer and try to parse into
        BitHorde-messages'''
        self.buf += data

        dataleft = True
        while dataleft:
            buf = self.buf
            try:
                id, newpos = decoder._DecodeVarint32(buf,0)
                size, newpos = decoder._DecodeVarint32(buf,newpos)
                id = id >> 3
                msgend = newpos+size
                if msgend > len(buf):
                    dataleft = False
            except IndexError:
                dataleft = False

            if dataleft:
                self.buf = buf[msgend:]
                msg = MSGMAP[id].message_type._concrete_class()
                msg.ParseFromString(buf[newpos:msgend])

                self.msgHandler(msg)
Example #16
File: channel.py Project: Digoss/snakebite
def get_delimited_message_bytes(byte_stream):
    ''' Parse a delimited protobuf message. This is done by first getting a protobuf varint from
    the stream that represents the length of the message, then reading that many
    bytes from the stream and then parsing them.
    Since the int can be represented as max 4 bytes, first get 4 bytes and try to decode.
    The decoder returns the value and the position where the value was found, so we need
    to rewind the buffer to the position, because the remaining bytes belong to the message
    after.
    '''

    (length, pos) = decoder._DecodeVarint32(byte_stream.read(4), 0)
    log.debug("Delimited message length (pos %d): %d" % (pos, length))

    byte_stream.rewind(4 - pos)
    message_bytes = byte_stream.read(length)
    log.debug("Delimited message bytes (%d): %s" % (len(message_bytes), format_bytes(message_bytes)))
    return message_bytes
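The rewind() above belongs to snakebite's buffered stream wrapper. For a plain binary file object without rewind(), the same length prefix can be read one byte at a time; the following is a minimal sketch, assuming only the standard protobuf decoder:

from google.protobuf.internal import decoder

def read_delimited_from_file(f):
    # Read the varint length prefix byte by byte; a cleared high bit
    # marks the last byte of the varint.
    prefix = b''
    while True:
        b = f.read(1)
        if not b:
            return None  # end of stream
        prefix += b
        if b[0] & 0x80 == 0:
            break
    (length, _) = decoder._DecodeVarint32(prefix, 0)
    return f.read(length)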
Example #17
def _read_next(f, msg):
    # Reads next message from the given file, following suit with C++.
    # Number of bytes we need to consume so that we may still use
    # `_DecodeVarint32`.
    peek_size = 4
    peek = f.read(peek_size)
    if len(peek) == 0:
        # We have reached the end.
        return _READ_END
    msg_size, peek_end = _DecodeVarint32(peek, 0)
    peek_left = peek_size - peek_end
    # Read remaining and concatenate.
    remaining = f.read(msg_size - peek_left)
    msg_raw = peek[peek_end:] + remaining
    assert len(msg_raw) == msg_size
    # Now read the message.
    msg.ParseFromString(msg_raw)
    return _READ_GOOD
Example #18
def parse_metric_family(buf):
    """
    Parse the binary buffer in input, searching for Prometheus messages
    of type MetricFamily [0] delimited by a varint32 [1].

    [0] https://github.com/prometheus/client_model/blob/086fe7ca28bde6cec2acd5223423c1475a362858/metrics.proto#L76-%20%20L81
    [1] https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/AbstractMessageLite#writeDelimitedTo(java.io.OutputStream)
    """
    n = 0
    while n < len(buf):
        msg_len, new_pos = _DecodeVarint32(buf, n)
        n = new_pos
        msg_buf = buf[n:n+msg_len]
        n += msg_len

        message = metrics_pb2.MetricFamily()
        message.ParseFromString(msg_buf)
        yield message
Example #19
def recv(sock):
    # buf = []
    # n = 0
    # while n < len(buf):
    #     msg_len, new_pos = _DecodeVarint32(buf, n)
    #     n = new_pos
    #     msg_buf = buf[n:n+msg_len]
    #     n += msg_len
    #     read_metric = metric_pb2.Metric()
    #     read_metric.ParseFromString(msg_buf)
    buff = []
    while True:
        tmp = sock.recv(1)
        buff += tmp
        msg_len, new_pos = _DecodeVarint32(buff, 0)
        if new_pos != 0:
            break
    #should socket be replaced with sock?
    msg = sock.recv(msg_len)
    print("receive a msg: ")
    print(msg)
    return msg
Example #20
    def parse_metric_family(self, buf, content_type):
        """
        Gets the output data from a prometheus endpoint response along with its
        Content-type header and parses it into Prometheus classes (see [0])

        Parse the binary buffer in input, searching for Prometheus messages
        of type MetricFamily [0] delimited by a varint32 [1] when the
        content-type is a `application/vnd.google.protobuf`.

        [0] https://github.com/prometheus/client_model/blob/086fe7ca28bde6cec2acd5223423c1475a362858/metrics.proto#L76-%20%20L81
        [1] https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/AbstractMessageLite#writeDelimitedTo(java.io.OutputStream)
        """
        if 'application/vnd.google.protobuf' in content_type:
            n = 0
            while n < len(buf):
                msg_len, new_pos = _DecodeVarint32(buf, n)
                n = new_pos
                msg_buf = buf[n:n + msg_len]
                n += msg_len

                message = metrics_pb2.MetricFamily()
                message.ParseFromString(msg_buf)
                yield message
        elif 'text/plain' in content_type:
            messages = {
            }  # map with the name of the element (before the labels) and the list of occurrences with labels and values
            obj_map = {}  # map of the types of each metrics
            obj_help = {}  # help for the metrics
            for line in buf.splitlines():
                self._extract_metrics_from_string(line, messages, obj_map,
                                                  obj_help)
            for _m in obj_map:
                if _m in messages or (obj_map[_m] == 'histogram'
                                      and '{}_bucket'.format(_m) in messages):
                    yield self._extract_metric_from_map(
                        _m, messages, obj_map, obj_help)
        else:
            raise self.UnknownFormatError(
                'Unsupported content-type provided: {}'.format(content_type))
Example #21
    def read_and_deal_cmd(self):
        while True:
            tmpBuf = self.sk.recv(1)  # read the varint length prefix one byte at a time
            if len(tmpBuf) == 0:
                raise Exception('close')
            self.cmd_buf += tmpBuf
            if ord(tmpBuf[0]) & 0x80 == 0:
                break

        cmd_len = decoder._DecodeVarint32(self.cmd_buf, 0)[0]
        prefix_len = len(self.cmd_buf)
        print "cmd len", cmd_len
        while len(self.cmd_buf) < cmd_len + prefix_len:
            tmpBuf = self.sk.recv(cmd_len + prefix_len - len(self.cmd_buf))
            if len(tmpBuf) == 0:
                raise Exception('close')
            self.cmd_buf += tmpBuf
        cmd = CommandMsg()
        cmd.ParseFromString(self.cmd_buf[prefix_len:])
        print "receive cmd", cmd.type
        self.deal_cmd(cmd.type, cmd.content)
        self.cmd_buf = ''
Example #22
    def recv(self):
        """
        Check the first 4 bytes to determine the response length. Since the size is a varint,
        the length prefix can be anywhere from 1 to 4 bytes, so take care not to swallow the
        start of the real message.
        Returns the response as a byte string; the caller needs to call ParseFromString on it.
        """

        #int length is at most 4 bytes long
        hdr_bytes = self.sock.recv(4)
        (msg_length, hdr_length) = _DecodeVarint32(hdr_bytes, 0)
        rsp_buffer = io.BytesIO()
        if hdr_length < 4:
            rsp_buffer.write(hdr_bytes[hdr_length:])

        # read the remaining message bytes
        msg_length = msg_length - (4 - hdr_length)
        while msg_length > 0:
            rsp_bytes = self.sock.recv(min(8096, msg_length))
            rsp_buffer.write(rsp_bytes)
            msg_length = msg_length - len(rsp_bytes)

        return rsp_buffer.getvalue()
Example #23
def split_protobuf_stream(chunk_iterator, message_class):
    buf = None
    for chunk in chunk_iterator:
        if buf is None:
            buf = chunk
        else:
            buf += chunk

        while len(buf):
            try:
                # n is advanced beyond the varint
                msg_len, n = _DecodeVarint32(buf, 0)
            except IndexError:
                break  # Need another chunk

            if n + msg_len > len(buf):
                break  # Need another chunk

            msg_buf = buf[n:(n + msg_len)]
            buf = buf[(n + msg_len):]
            message = message_class()
            message.ParseFromString(msg_buf)
            yield message
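Usage might look like the following sketch, where the chunk iterator is any iterable of byte chunks (for example, a file read in fixed-size blocks) and ExampleMessage is a hypothetical generated message class:

# Hypothetical usage: feed arbitrary-sized chunks in, get parsed messages out.
with open('messages.bin', 'rb') as f:
    chunks = iter(lambda: f.read(4096), b'')
    for message in split_protobuf_stream(chunks, ExampleMessage):
        print(message)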
Example #24
def get_instances(json_data, file_name):
    print("Loading {}".format(file_name))
    f = open(file_name, 'rb')
    buf = f.read()
    cur_pos = 0
    cnt_ins = 0
    cnt_relfact = 0
    while (cur_pos < len(buf)):
        msg_len, new_pos = _DecodeVarint32(buf, cur_pos)
        cur_pos = new_pos
        msg_buf = buf[cur_pos:cur_pos+msg_len]
        cur_pos += msg_len 
        relfact = pb.Relation()
        cnt_relfact += 1
        relfact.ParseFromString(msg_buf)
        head = guid2entity[relfact.sourceGuid]
        tail = guid2entity[relfact.destGuid]
        relation = relfact.relType
        for ins in relfact.mention:
            cnt_ins += 1
            json_data.append({'sentence': ins.sentence, 'head': head, 'tail': tail, 'relation': relation})
    f.close()
    print("Finish loading, got {} instances and {} relfacts totally".format(cnt_ins, cnt_relfact))
Example #25
 def __init__(self, directory: str, msg_class: Type[MsgClass]):
     """
     Constructs a ProtoLog from the directory of delimited Protobuf 'RepeatedAnyMsg' messages
     at the given path.
     :param directory: The path of a directory containing delimited RepeatedAnyMsg messages in files
     :param msg_class: The type of the message contained in the RepeatedAnyMsg chunks
     """
     self.msg_class: Type[MsgClass] = msg_class
     self.repeated_any_msgs: List[RepeatedAnyMsg] = []
     self.chunk_start_idxs: List[int] = []
     self.cached_unpacked_msgs: Dict[int, MsgClass] = dict()
     cur_start_idx = 0
     for file in os.listdir(directory):
         filepath = os.path.join(directory, file)
         if file.isnumeric() and os.path.isfile(filepath):
             buf = open(filepath, "rb").read()
             msg_len, new_pos = _DecodeVarint32(buf, 0)
             repeated_any_msg = RepeatedAnyMsg()
             repeated_any_msg.ParseFromString(buf[new_pos:new_pos +
                                                  msg_len])
             self.repeated_any_msgs.append(repeated_any_msg)
             self.chunk_start_idxs.append(cur_start_idx)
             cur_start_idx += len(repeated_any_msg.messages)
Example #26
def get_delimited_message_bytes(byte_stream, nr=4):
    ''' Parse a delimited protobuf message. This is done by first getting a protobuf varint from
    the stream that represents the length of the message, then reading that many
    bytes from the stream and then parsing them.
    Since the int can be represented as max 4 bytes, first get 4 bytes and try to decode.
    The decoder returns the value and the position where the value was found, so we need
    to rewind the buffer to the position, because the remaining bytes belong to the message
    after.
    '''

    (length, pos) = decoder._DecodeVarint32(byte_stream.read(nr), 0)
    if log.getEffectiveLevel() == logging.DEBUG:
        log.debug("Delimited message length (pos %d): %d" % (pos, length))

    delimiter_bytes = nr - pos

    byte_stream.rewind(delimiter_bytes)
    message_bytes = byte_stream.read(length)
    if log.getEffectiveLevel() == logging.DEBUG:
        log.debug("Delimited message bytes (%d): %s" % (len(message_bytes), format_bytes(message_bytes)))

    total_len = length + pos
    return (total_len, message_bytes)
Example #27
    def parse_delimited_single(data: bytes, pos=0):
        """
        Parse a single delimited entry from a byte stream
        Parameters
        ----------
        data : bytes
            The bytestream
        pos : int
            The starting position. Default is zero

        Returns
        -------
        pos : int
            Current position in the stream after parsing
        profile : DatasetProfile
            A dataset profile
        """
        msg_len, new_pos = _DecodeVarint32(data, pos)
        pos = new_pos
        msg_buf = data[pos:pos + msg_len]
        pos += msg_len
        profile = DatasetProfile.from_protobuf_string(msg_buf)
        return pos, profile
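Iterating over a whole buffer of delimited profiles then reduces to calling this in a loop; a short sketch, assuming the method is exposed as a static method on DatasetProfile and that `data` holds the entire file contents:

pos = 0
profiles = []
while pos < len(data):
    pos, profile = DatasetProfile.parse_delimited_single(data, pos)
    profiles.append(profile)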
Example #28
def processBuffer(buffer):
    """ processBuffer(buffer)
Process each message packet in WILL format and return an array of arrays of
x,y, lineWidth values.
The buffer should be the paths.protobuf file in the WILL mini filesystem.
This function walks through each message packet and decodes it, assembling
the array of vectors x, y and linewidth

buffer : Whole protobuffer read from the WILL file.
    """

    xData = []
    yData = []
    lineData = []
    minX = 10000
    maxX = -10000
    minY = 10000
    maxY = -10000
    
    buffPos = 0
    while buffPos < len(buffer):
        messageLength, buffPos = _DecodeVarint32(buffer, buffPos)
        messageBytes = buffer[buffPos:buffPos+messageLength]
        buffPos += messageLength
        
        #print(f'messageLength = {messageLength}')
        #print('messageBytes:')
        #print(messageBytes)
        
        (x, y, lineWidth) = decodeMessagePacket(messageBytes)
        
        xData.append(x)
        yData.append(y)
        lineData.append(lineWidth)

        
    return (xData, yData, lineData)
Example #29
def annotate_with_corenlp(text, output_document, full):
    text = text.encode('utf-8')

    # Fix non-breaking spaces.
    # TODO(klopyrev): Do this in the data collection code.
    text = text.replace('\xc2\xa0', ' ')

    properties = {
        'annotators':
        'tokenize,ssplit' + (',pos,ner,parse,depparse' if full else ''),
        'outputFormat':
        'serialized',
        'serializer':
        'edu.stanford.nlp.pipeline.ProtobufAnnotationSerializer',
        'timeout':
        '600000',
        'pos.maxlen':
        '200',
        'parse.maxlen':
        '200',
    }
    query = {'properties': json.dumps(properties)}
    while True:
        try:
            result = urllib2.urlopen(CORENLP_URL + '/?' +
                                     urllib.urlencode(query),
                                     data=text).read()
            break
        except urllib2.HTTPError as e:
            if e.code != httplib.INTERNAL_SERVER_ERROR:
                print >> sys.stderr, 'Received internal server error, will retry.'
                sys.stderr.flush()
                raise
            time.sleep(5.0)
    _, hdr_length = _DecodeVarint32(result, 0)
    output_document.ParseFromString(result[hdr_length:])
Example #30
def recv_response(sock):
    """recv message from world
        Return byte string as result
        """
    var_int_buff = []
    count = 0

    while True:  # get the length of the message
        try:
            count += 1
            buf = sock.recv(1)
            var_int_buff += buf

            msg_len, new_pos = _DecodeVarint32(var_int_buff, 0)
            if new_pos != 0:
                break
        except:  # broken connection
            # sock.connect(self.addr)
            continue
    print(msg_len)
    whole_message = sock.recv(msg_len)
    # response = amazon_pb2.AConnected()
    # response.ParseFromString(whole_message)
    return whole_message
Example #31
def iter_objects(url, DataClass):
    ''' Generate a stream of objects from the protobuf URL.
    '''
    response, position = requests.get(url), 0
    logger.debug('Got {} bytes: {}'.format(len(response.content), repr(response.content[:32])))

    if response.status_code not in range(200, 299):
        logger.debug('Got HTTP {}'.format(response.status_code))
        return

    while position < len(response.content):
        message_length, new_position = _DecodeVarint32(response.content, position)
        position = new_position
        message = response.content[position:position+message_length]
        position += message_length

        try:
            object = DataClass()
            object.ParseFromString(message)
        except google.protobuf.message.DecodeError:
            # Empty tile? Shrug.
            continue
        else:
            yield object
Example #32
    def read(self) -> Optional[MessageType]:
        from google.protobuf.internal.decoder import _DecodeVarint32  # type: ignore
        # from https://cwiki.apache.org/confluence/display/GEODE/Delimiting+Protobuf+Messages
        current = b''
        while len(current) == 0 or current[-1] & 0x80 == 0x80:
            new = self.buffer.read(1)
            if not new:
                return None
            current = current + new
        size, new_pos = _DecodeVarint32(current, 0)
        assert new_pos == len(current)

        current = b''
        while len(current) < size:
            new = self.buffer.read(size - len(current))
            if not new:
                return None
            current = current + new
        assert (len(current) == size)

        message = self.message_type()
        message.ParseFromString(current)

        return message
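The write side of this framing is symmetric; a minimal sketch (assuming the same buffer-like object) using `_VarintBytes` from the protobuf internals:

from google.protobuf.internal.encoder import _VarintBytes

def write_delimited(buffer, message):
    # Length-prefix the serialized message with a varint, matching the
    # read() method above.
    payload = message.SerializeToString()
    buffer.write(_VarintBytes(len(payload)))
    buffer.write(payload)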
Example #33
    def run(self):
        """
        Spawns the Datareceiver task.

        Returns
        -------
        None.

        """
        # implement stop routine
        while not self._stop_event.is_set():
            data, addr = self.socket.recvfrom(
                1500)  # buffer size is 1500 bytes
            wasValidData = False
            wasValidDescription = False
            ProtoData = messages_pb2.DataMessage()
            ProtoDescription = messages_pb2.DescriptionMessage()
            SensorID = 0
            BytesProcessed = 4  # we need an offset of 4 since the data starts with a 4-byte preamble
            if data[:4] == b"DATA":
                while BytesProcessed < len(data):
                    msg_len, new_pos = _DecodeVarint32(data, BytesProcessed)
                    BytesProcessed = new_pos

                    try:
                        msg_buf = data[new_pos:new_pos + msg_len]
                        ProtoData.ParseFromString(msg_buf)
                        wasValidData = True
                        SensorID = ProtoData.id
                        message = {
                            "ProtMsg": copy.deepcopy(ProtoData),
                            "Type": "Data"
                        }
                        BytesProcessed += msg_len
                    except:
                        pass  # ? no exception for wrong data type !!
                    if not (wasValidData or wasValidDescription):
                        print("INVALID PROTODATA")
                        pass  # invalid data leave parsing routine

                    if SensorID in self.AllSensors:
                        try:
                            self.AllSensors[SensorID].buffer.put_nowait(
                                message)
                        except:
                            tmp = self.packestlosforsensor[SensorID] = (
                                self.packestlosforsensor[SensorID] + 1)
                            if tmp == 1:
                                print("!!!! FATAL PERFORMANCE PROBLEMS !!!!")
                                print("FIRSTTIME packet lost for sensor ID:" +
                                      str(SensorID))
                                print(
                                    "DROP MESSAGES ARE ONLY PRINTETD EVERY 1000 DROPS FROM NOW ON !!!!!!!! "
                                )
                            if tmp % 1000 == 0:
                                print(
                                    "oh no lost an other  thousand packets :(")
                    else:
                        self.AllSensors[SensorID] = Sensor(SensorID)
                        print("FOUND NEW SENSOR WITH ID=hex" + hex(SensorID) +
                              "==>dec:" + str(SensorID))
                        self.packestlosforsensor[
                            SensorID] = 0  # initing lost packet counter
                    self.msgcount = self.msgcount + 1

                    if self.msgcount % self.params[
                            "PacketrateUpdateCount"] == 0:
                        print("received " +
                              str(self.params["PacketrateUpdateCount"]) +
                              " packets")
                        if self.lastTimestamp != 0:
                            timeDIFF = time.monotonic() - self.lastTimestamp
                            self.Datarate = (
                                self.params["PacketrateUpdateCount"] /
                                timeDIFF)
                            print("Update rate is " + str(self.Datarate) +
                                  " Hz")
                            self.lastTimestamp = time.monotonic()
                        else:
                            self.lastTimestamp = time.monotonic()
            elif data[:4] == b"DSCP":
                while BytesProcessed < len(data):
                    msg_len, new_pos = _DecodeVarint32(data, BytesProcessed)
                    BytesProcessed = new_pos
                    try:
                        msg_buf = data[new_pos:new_pos + msg_len]
                        ProtoDescription.ParseFromString(msg_buf)
                        # print(msg_buf)
                        wasValidData = True
                        SensorID = ProtoDescription.id
                        message = {
                            "ProtMsg": ProtoDescription,
                            "Type": "Description"
                        }
                        BytesProcessed += msg_len
                    except:
                        pass  # ? no exception for wrong data type !!
                    if not (wasValidData or wasValidDescription):
                        print("INVALID PROTODATA")
                        pass  # invalid data leave parsing routine

                    if SensorID in self.AllSensors:
                        try:
                            self.AllSensors[SensorID].buffer.put_nowait(
                                message)
                        except:
                            print("packet lost for sensor ID:" + hex(SensorID))
                    else:
                        self.AllSensors[SensorID] = Sensor(SensorID)
                        print("FOUND NEW SENSOR WITH ID=hex" + hex(SensorID) +
                              " dec==>:" + str(SensorID))
                    self.msgcount = self.msgcount + 1

                    if self.msgcount % self.params[
                            "PacketrateUpdateCount"] == 0:
                        print("received " +
                              str(self.params["PacketrateUpdateCount"]) +
                              " packets")
                        if self.lastTimestamp != 0:
                            timeDIFF = time.monotonic() - self.lastTimestamp
                            self.Datarate = (
                                self.params["PacketrateUpdateCount"] /
                                timeDIFF)
                            print("Update rate is " + str(self.Datarate) +
                                  " Hz")
                            self.lastTimestamp = time.monotonic()
                        else:
                            self.lastTimestamp = time.monotonic()
            else:
                print("unrecognized packed preamble" + str(data[:5]))
Example #34
def get_archive_info_and_remainder(buf):
    msg_len, new_pos = _DecodeVarint32(buf, 0)
    n = new_pos
    msg_buf = buf[n:n + msg_len]
    n += msg_len
    return ArchiveInfo.FromString(msg_buf), buf[n:]
Example #35
 def get_next_length(self,tx):
     field, pos = _DecodeVarint32(tx,0)
     size, newpos = _DecodeVarint32(tx,pos)
     if (field&0x07==0): return newpos
     return size + newpos
Example #36
    def run(self):
        """
        

        Returns
        -------
        None.

        """
        # implement stop routine
        while not self._stop_event.is_set():
            data, addr = self.socket.recvfrom(1500)  # buffer size is 1500 bytes
            wasValidData = False
            wasValidDescription = False
            ProtoData = messages_pb2.DataMessage()
            ProtoDescription = messages_pb2.DescriptionMessage()
            SensorID = 0
            BytesProcessed = 4  # we need an offset of 4 since the data starts with a 4-byte preamble
            if data[:4] == b"DATA":
                while BytesProcessed < len(data):
                    msg_len, new_pos = _DecodeVarint32(data, BytesProcessed)
                    BytesProcessed = new_pos

                    try:
                        msg_buf = data[new_pos : new_pos + msg_len]
                        ProtoData.ParseFromString(msg_buf)
                        wasValidData = True
                        SensorID = ProtoData.id
                        message = {"ProtMsg": copy.deepcopy(ProtoData), "Type": "Data"}
                        BytesProcessed += msg_len
                    except:
                        pass  # ? no exception for wrong data type !!
                    if not (wasValidData or wasValidDescription):
                        print("INVALID PROTODATA")
                        pass  # invalid data leave parsing routine

                    if SensorID in self.AllSensors:
                        try:
                            self.AllSensors[SensorID].buffer.put_nowait(message)
                        except:
                            print("packet lost for sensor ID:" + str(SensorID))
                    else:
                        self.AllSensors[SensorID] = Sensor(SensorID)
                        print(
                            "FOUND NEW SENSOR WITH ID=hex"
                            + hex(SensorID)
                            + "==>dec:"
                            + str(SensorID)
                        )
                    self.msgcount = self.msgcount + 1

                    if self.msgcount % self.params["PacketrateUpdateCount"] == 0:
                        print(
                            "received "
                            + str(self.params["PacketrateUpdateCount"])
                            + " packets"
                        )
                        if self.lastTimestamp != 0:
                            timeDIFF = datetime.now() - self.lastTimestamp
                            timeDIFF = timeDIFF.seconds + timeDIFF.microseconds * 1e-6
                            self.Datarate = (
                                self.params["PacketrateUpdateCount"] / timeDIFF
                            )
                            print("Update rate is " + str(self.Datarate) + " Hz")
                            self.lastTimestamp = datetime.now()
                        else:
                            self.lastTimestamp = datetime.now()
            elif data[:4] == b"DSCP":
                while BytesProcessed < len(data):
                    msg_len, new_pos = _DecodeVarint32(data, BytesProcessed)
                    BytesProcessed = new_pos
                    try:
                        msg_buf = data[new_pos : new_pos + msg_len]
                        ProtoDescription.ParseFromString(msg_buf)
                        # print(msg_buf)
                        wasValidData = True
                        SensorID = ProtoDescription.id
                        message = {"ProtMsg": ProtoDescription, "Type": "Description"}
                        BytesProcessed += msg_len
                    except:
                        pass  # ? no exception for wrong data type !!
                    if not (wasValidData or wasValidDescription):
                        print("INVALID PROTODATA")
                        pass  # invalid data leave parsing routine

                    if SensorID in self.AllSensors:
                        try:
                            self.AllSensors[SensorID].buffer.put_nowait(message)
                        except:
                            print("packet lost for sensor ID:" + hex(SensorID))
                    else:
                        self.AllSensors[SensorID] = Sensor(SensorID)
                        print(
                            "FOUND NEW SENSOR WITH ID=hex"
                            + hex(SensorID)
                            + " dec==>:"
                            + str(SensorID)
                        )
                    self.msgcount = self.msgcount + 1

                    if self.msgcount % self.params["PacketrateUpdateCount"] == 0:
                        print(
                            "received "
                            + str(self.params["PacketrateUpdateCount"])
                            + " packets"
                        )
                        if self.lastTimestamp != 0:
                            timeDIFF = datetime.now() - self.lastTimestamp
                            timeDIFF = timeDIFF.seconds + timeDIFF.microseconds * 1e-6
                            self.Datarate = (
                                self.params["PacketrateUpdateCount"] / timeDIFF
                            )
                            print("Update rate is " + str(self.Datarate) + " Hz")
                            self.lastTimestamp = datetime.now()
                        else:
                            self.lastTimestamp = datetime.now()
            else:
                print("unrecognized packed preamble" + str(data[:5]))
Example #37
File: task.py Project: zizon/prpc
 def _read_varint32(self, raw, start):
     return _DecodeVarint32(raw, start)
Example #38
def decodeMessagePacket(messageBytes):
    """ decodeMessagePacket(messageBytes):
    Decodes the message packet in the WILL format and returns the vectors of
    location points x, y and the line widths.
    It assumes that the message is in the format specified in the WILL file format
    from Wacom.
    Future versions might break this code, since it's based on the format from 2018.


    messageBytes : message to decode
    """

    nextPos = 0
    
    # startParameter  [FLOAT]
    #  (varint header)
    msgVarintIdentifier, nextPos = _DecodeVarint32(messageBytes, nextPos)
    field, wireType = wire_format.UnpackTag(msgVarintIdentifier)
    #  value
    startParameter =struct.unpack('f',messageBytes[nextPos:nextPos+4])[0]
    nextPos += 4

    debugPrint(f'field: {field}, wire type:{wireType}')
    debugPrint(f'startParameter: {startParameter}')


    # stopParameter  [FLOAT]
    #  (varint header)
    msgVarintIdentifier, nextPos = _DecodeVarint32(messageBytes, nextPos)
    field, wireType = wire_format.UnpackTag(msgVarintIdentifier)
    #  value
    stopParameter = struct.unpack('f',messageBytes[nextPos:nextPos+4])[0]
    nextPos += 4

    debugPrint(f'field: {field}, wire type:{wireType}')
    debugPrint(f'stopParameter: {stopParameter}')

    
    # decimalPrecision  [VARIANT]
    #  (varint header)
    msgVarintIdentifier, nextPos = _DecodeVarint32(messageBytes, nextPos)
    field, wireType = wire_format.UnpackTag(msgVarintIdentifier)
    #  value
    decimalPrecision, nextPos = _DecodeVarint32(messageBytes, nextPos)

    debugPrint(f'field: {field}, wire type:{wireType}')
    debugPrint(f'decimalPrecision: {decimalPrecision}')
    

    # x,y sequence  [BYTE STRING]
    #  (varint header)
    msgVarintIdentifier, nextPos = _DecodeVarint32(messageBytes, nextPos)
    field, wireType = wire_format.UnpackTag(msgVarintIdentifier)
    #  value
    strLen, nextPos = _DecodeVarint32(messageBytes, nextPos)
    xyBytes = messageBytes[nextPos:nextPos+strLen]
    nextPos += strLen
    xyList = decodeVarintArray(xyBytes)
    
    dx = xyList[0::2]
    dy = xyList[1::2]
    
    factor = 10**decimalPrecision
    
    x = cumsum(dx, factor)
    y = cumsum(dy, factor)
    
    debugPrint(f'field: {field}, wire type:{wireType}')
    debugPrint(f'strLen: {strLen}')
    debugPrint(f'x [{len(x)}]: {x[0:10]}...')
    debugPrint(f'y [{len(y)}]: {y[0:10]}...')




    # stroke width  [BYTE STRING]
    #  (varint header)
    msgVarintIdentifier, nextPos = _DecodeVarint32(messageBytes, nextPos)
    field, wireType = wire_format.UnpackTag(msgVarintIdentifier)
    #  value
    strLen, nextPos = _DecodeVarint32(messageBytes, nextPos)
    strokeWidthBytes = messageBytes[nextPos:nextPos+strLen]
    nextPos += strLen
    strokeWidthList = decodeVarintArray(strokeWidthBytes)
    
    strokeWidths = cumsum(strokeWidthList, 1.0)
    
    debugPrint(f'field: {field}, wire type:{wireType}')
    debugPrint(f'strLen: {strLen}')
    debugPrint(f'strokeWidthList [{len(strokeWidths)}]: {strokeWidths[0:10]}...')
    

    # color values  [BYTE STRING]
    #  (varint header)
    msgVarintIdentifier, nextPos = _DecodeVarint32(messageBytes, nextPos)
    field, wireType = wire_format.UnpackTag(msgVarintIdentifier)
    #  value
    strLen, nextPos = _DecodeVarint32(messageBytes, nextPos)
    colorValuesBytes = messageBytes[nextPos:nextPos+strLen]
    nextPos += strLen
    colorValueList = decodeVarintArray(colorValuesBytes)
    
    debugPrint(f'field: {field}, wire type:{wireType}')
    debugPrint(f'strLen: {strLen}')
    debugPrint(f'colorValueList [{len(colorValueList)}]: {colorValueList}')

    debugPrint()
    
    # debugPrint(struct.unpack('BBBB',messageBytes[nextPos:]))

    lineWidth = 0.01*sum(strokeWidths)/len(strokeWidths)
    
    #plt.plot(x,y,'k', linewidth=(lineWidth**5)/20)

    #plt.axis('equal')
    
    return (x, y, lineWidth)
Example #39
 def decode_varint(self, data):
     """ Decode a protobuf varint to an int """
     return _DecodeVarint32(data, 0)[0]
Example #40
 def _get_file_block(self):
     buf = self._file.read(4)
     len, offset = _DecodeVarint32(buf, 0)
     buf = buf[offset:] + self._file.read(len - 4 + offset)
     return buf
Example #41
def load_demonstration(file_path: str) -> Tuple[BrainParameters, List[BrainInfo], int]:
    """
    Loads and parses a demonstration file.
    :param file_path: Location of demonstration file (.demo).
    :return: BrainParameter and list of BrainInfos containing demonstration data.
    """

    # First 32 bytes of file dedicated to meta-data.
    INITIAL_POS = 33
    file_paths = []
    if os.path.isdir(file_path):
        all_files = os.listdir(file_path)
        for _file in all_files:
            if _file.endswith(".demo"):
                file_paths.append(os.path.join(file_path, _file))
        if not all_files:
            raise ValueError("There are no '.demo' files in the provided directory.")
    elif os.path.isfile(file_path):
        file_paths.append(file_path)
        file_extension = pathlib.Path(file_path).suffix
        if file_extension != ".demo":
            raise ValueError(
                "The file is not a '.demo' file. Please provide a file with the "
                "correct extension."
            )
    else:
        raise FileNotFoundError(
            "The demonstration file or directory {} does not exist.".format(file_path)
        )

    brain_params = None
    brain_param_proto = None
    brain_infos = []
    total_expected = 0
    for _file_path in file_paths:
        data = open(_file_path, "rb").read()
        next_pos, pos, obs_decoded = 0, 0, 0
        while pos < len(data):
            next_pos, pos = _DecodeVarint32(data, pos)
            if obs_decoded == 0:
                meta_data_proto = DemonstrationMetaProto()
                meta_data_proto.ParseFromString(data[pos : pos + next_pos])
                total_expected += meta_data_proto.number_steps
                pos = INITIAL_POS
            if obs_decoded == 1:
                brain_param_proto = BrainParametersProto()
                brain_param_proto.ParseFromString(data[pos : pos + next_pos])

                pos += next_pos
            if obs_decoded > 1:
                agent_info = AgentInfoProto()
                agent_info.ParseFromString(data[pos : pos + next_pos])
                if brain_params is None:
                    brain_params = BrainParameters.from_proto(
                        brain_param_proto, agent_info
                    )
                brain_info = BrainInfo.from_agent_proto(0, [agent_info], brain_params)
                brain_infos.append(brain_info)
                if len(brain_infos) == total_expected:
                    break
                pos += next_pos
            obs_decoded += 1
    return brain_params, brain_infos, total_expected
Example #42
 def process(self, data):
     for m in self.framer.process(data):
         lval, newpos = _DecodeVarint32(m, 0)
         encdata = m[newpos:(newpos + lval)]
         yield encdata, m[(newpos + lval):]
Example #43
def get_message_contents(raw_data):
    (data_size, data_size_len) = decoder._DecodeVarint32(raw_data, 0)
    return (raw_data[data_size_len:data_size_len+data_size], data_size_len + data_size)
Example #44
    def parse_metric_family(self, response):
        """
        Parse the MetricFamily from a valid requests.Response object to provide a MetricFamily object (see [0])

        The text format uses iter_lines() generator.

        The protobuf format directly parse the response.content property searching for Prometheus messages of type
        MetricFamily [0] delimited by a varint32 [1] when the content-type is a `application/vnd.google.protobuf`.

        [0] https://github.com/prometheus/client_model/blob/086fe7ca28bde6cec2acd5223423c1475a362858/metrics.proto#L76-%20%20L81  # noqa: E501
        [1] https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/AbstractMessageLite#writeDelimitedTo(java.io.OutputStream)  # noqa: E501

        :param response: requests.Response
        :return: metrics_pb2.MetricFamily()
        """
        if 'application/vnd.google.protobuf' in response.headers[
                'Content-Type']:
            n = 0
            buf = response.content
            while n < len(buf):
                msg_len, new_pos = _DecodeVarint32(buf, n)
                n = new_pos
                msg_buf = buf[n:n + msg_len]
                n += msg_len

                message = metrics_pb2.MetricFamily()
                message.ParseFromString(msg_buf)
                message.name = self.remove_metric_prefix(message.name)

                # Lookup type overrides:
                if self.type_overrides and message.name in self.type_overrides:
                    new_type = self.type_overrides[message.name]
                    if new_type in self.METRIC_TYPES:
                        message.type = self.METRIC_TYPES.index(new_type)
                    else:
                        self.log.debug(
                            "type override %s for %s is not a valid type name",
                            new_type, message.name)
                yield message

        elif 'text/plain' in response.headers['Content-Type']:
            input_gen = response.iter_lines(
                chunk_size=self.REQUESTS_CHUNK_SIZE, decode_unicode=True)
            if self._text_filter_blacklist:
                input_gen = self._text_filter_input(input_gen)

            messages = defaultdict(
                list)  # map with the name of the element (before the labels)
            # and the list of occurrences with labels and values

            obj_map = {}  # map of the types of each metrics
            obj_help = {}  # help for the metrics
            for metric in text_fd_to_metric_families(input_gen):
                metric.name = self.remove_metric_prefix(metric.name)
                metric_name = "%s_bucket" % metric.name if metric.type == "histogram" else metric.name
                metric_type = self.type_overrides.get(metric_name, metric.type)
                if metric_type == "untyped" or metric_type not in self.METRIC_TYPES:
                    continue

                for sample in metric.samples:
                    if (sample[0].endswith("_sum") or
                            sample[0].endswith("_count")) and metric_type in [
                                "histogram",
                                "summary",
                            ]:
                        messages[sample[0]].append({
                            "labels": sample[1],
                            'value': sample[2]
                        })
                    else:
                        messages[metric_name].append({
                            "labels": sample[1],
                            'value': sample[2]
                        })

                obj_map[metric.name] = metric_type
                obj_help[metric.name] = metric.documentation

            for _m in obj_map:
                if _m in messages or (obj_map[_m] == 'histogram' and
                                      ('{}_bucket'.format(_m) in messages)):
                    yield self._extract_metric_from_map(
                        _m, messages, obj_map, obj_help)
        else:
            raise UnknownFormatError(
                'Unsupported content-type provided: {}'.format(
                    response.headers['Content-Type']))
Example #45
def record_positions(buffer, beginning):
    msg_len, new_pos = _DecodeVarint32(buffer, beginning)
    return new_pos, new_pos + msg_len
Example #46
    def agent_loop(self):
        data, addr = self.socket.recvfrom(1500)  # buffer size is 1500 bytes
        wasValidData = False
        wasValidDescription = False

        ProtoData = messages_pb2.DataMessage()
        ProtoDescription = messages_pb2.DescriptionMessage()
        SensorID = 0
        BytesProcessed = 4  # we need an offset of 4 since the data starts with a 4-byte preamble
        if data[:4] == b"DATA":
            while BytesProcessed < len(data):
                msg_len, new_pos = _DecodeVarint32(data, BytesProcessed)
                BytesProcessed = new_pos

                try:
                    msg_buf = data[new_pos:new_pos + msg_len]
                    ProtoData.ParseFromString(msg_buf)
                    wasValidData = True
                    SensorID = ProtoData.id
                    message = {"ProtMsg": msg_buf, "Type": "Data"}
                    BytesProcessed += msg_len
                except:
                    pass  # ? no exception for wrong data type !!
                if not (wasValidData or wasValidDescription):
                    print("INVALID PROTODATA")
                    pass  # invalid data leave parsing routine

                if SensorID in self.AllSensors:
                    try:
                        self.AllSensors[SensorID].send_output(message)
                    except:
                        tmp = self.packestlosforsensor[SensorID] = (
                            self.packestlosforsensor[SensorID] + 1)
                        if tmp == 1:
                            print("!!!! FATAL PERFORMANCE PROBLEMS !!!!")
                            print("FIRSTTIME packet lost for sensor ID:" +
                                  str(SensorID))
                            print(
                                "DROP MESSAGES ARE ONLY PRINTETD EVERY 1000 DROPS FROM NOW ON !!!!!!!! "
                            )
                        if tmp % 1000 == 0:
                            print("oh no lost an other  thousand packets :(")
                else:
                    self.AllSensors[SensorID] = self.agentNetwork.add_agent(
                        agentType=SensorAgent, log_mode=False, ID=SensorID)
                    self.agentNetwork.add_coalition(
                        "Sensor_Group_1",
                        [self] + list(self.AllSensors.values()))
                    print("FOUND NEW SENSOR WITH ID=hex" + hex(SensorID) +
                          "==>dec:" + str(SensorID))
                    self.packestlosforsensor[
                        SensorID] = 0  # initing lost packet counter
                self.msgcount = self.msgcount + 1

                if self.msgcount % self.params["PacketrateUpdateCount"] == 0:
                    print("received " +
                          str(self.params["PacketrateUpdateCount"]) +
                          " packets")
                    if self.lastTimestamp != 0:
                        timeDIFF = datetime.now() - self.lastTimestamp
                        timeDIFF = timeDIFF.seconds + timeDIFF.microseconds * 1e-6
                        self.Datarate = (self.params["PacketrateUpdateCount"] /
                                         timeDIFF)
                        print("Update rate is " + str(self.Datarate) + " Hz")
                        self.lastTimestamp = datetime.now()
                    else:
                        self.lastTimestamp = datetime.now()
        elif data[:4] == b"DSCP":
            while BytesProcessed < len(data):
                msg_len, new_pos = _DecodeVarint32(data, BytesProcessed)
                BytesProcessed = new_pos
                try:
                    msg_buf = data[new_pos:new_pos + msg_len]
                    ProtoDescription.ParseFromString(msg_buf)
                    # print(msg_buf)
                    wasValidData = True
                    SensorID = ProtoDescription.id
                    message = {"ProtMsg": msg_buf, "Type": "Description"}
                    BytesProcessed += msg_len
                except:
                    pass  # ? no exception for wrong data type !!
                if not (wasValidData or wasValidDescription):
                    print("INVALID PROTODATA")
                    pass  # invalid data leave parsing routine

                if SensorID in self.AllSensors:
                    try:
                        self.AllSensors[SensorID].buffer.put_nowait(message)
                    except:
                        print("packet lost for sensor ID:" + hex(SensorID))
                else:
                    self.AllSensors[SensorID] = self.agentNetwork.add_agent(
                        agentType=SensorAgent, log_mode=False, ID=SensorID)
                    self.agentNetwork.add_coalition(
                        "Sensor_Group_1",
                        [self] + list(self.AllSensors.values()))
                    print("FOUND NEW SENSOR WITH ID=hex" + hex(SensorID) +
                          " dec==>:" + str(SensorID))
                self.msgcount = self.msgcount + 1

                if self.msgcount % self.params["PacketrateUpdateCount"] == 0:
                    print("received " +
                          str(self.params["PacketrateUpdateCount"]) +
                          " packets")
                    if self.lastTimestamp != 0:
                        timeDIFF = datetime.now() - self.lastTimestamp
                        timeDIFF = timeDIFF.seconds + timeDIFF.microseconds * 1e-6
                        self.Datarate = (self.params["PacketrateUpdateCount"] /
                                         timeDIFF)
                        print("Update rate is " + str(self.Datarate) + " Hz")
                        self.lastTimestamp = datetime.now()
                    else:
                        self.lastTimestamp = datetime.now()
        else:
            print("unrecognized packed preamble" + str(data[:5]))
Example #47
0
    def handle_connection(self):
        """
        Run connection handling routine.
        """

        context = SSLContext(PROTOCOL_TLS_CLIENT)
        context.check_hostname = False
        context.verify_mode = CERT_NONE

        with create_connection(
            (self.server_host, self.server_port)) as self.socket:
            with context.wrap_socket(
                    self.socket, server_hostname=self.server_host) as self.tls:

                # Use this buffer for all reads
                read_buffer = bytearray(4096)

                # Perform SID handshake
                rq = RQ_Cvid()
                rq.uuid = os.environ["S7S_UUID"]
                rq.instance = InstanceType.CLIENT
                rq.instance_flavor = InstanceFlavor.CLIENT_BRIGHTSTONE

                msg = MSG()
                setattr(msg, "payload", rq.SerializeToString())
                setattr(msg, "id", randint(0, 65535))

                self.tls.write(_VarintBytes(msg.ByteSize()))
                self.tls.sendall(msg.SerializeToString())

                read = self.tls.recv_into(read_buffer, 4096)
                if read == 0:
                    raise EOFError("connection closed by server during handshake")

                msg_len, msg_start = _DecodeVarint32(read_buffer, 0)

                msg = MSG()
                msg.ParseFromString(read_buffer[msg_start:msg_start + msg_len])

                rs = RS_Cvid()
                rs.ParseFromString(msg.payload)

                self.server_cvid = rs.server_cvid
                self.sid = rs.sid

                # The connection is now established
                with self.connection_state_cv:
                    self.connection_state = ConnectionState.CONNECTED
                    self.connection_state_cv.notify()

                # Begin accepting messages
                while True:

                    # TODO there may be another message in the read buffer
                    # (see the buffered-read sketch after this example)

                    # Read from the socket
                    read = self.tls.recv_into(read_buffer, 4096)
                    if read == 0:
                        raise EOFError("connection closed by server")

                    n = 0
                    while n < read:
                        msg_len, n = _DecodeVarint32(read_buffer, n)

                        msg = MSG()
                        msg.ParseFromString(read_buffer[n:n + msg_len])
                        n += msg_len  # advance past the message body to the next frame

                        # Place message in response map
                        with self.response_map_cv:
                            self.response_map[msg.id] = msg
                            self.response_map_cv.notify()

        # The connection is now closed
        with self.connection_state_cv:
            self.connection_state = ConnectionState.CLOSED
            self.connection_state_cv.notify()
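
The TODO in the read loop above flags a real limitation: a single recv_into() call can return several varint-delimited messages, or only part of one, and any remainder is silently lost. A possible buffered variant is sketched below; read_messages, msg_factory and handle_msg are illustrative names, not the project's actual API.

# Sketch only: keep leftover bytes between reads so frames that span
# recv_into() calls are not lost.
from google.protobuf.internal.decoder import _DecodeVarint32

def read_messages(tls_socket, msg_factory, handle_msg):
    pending = bytearray()
    chunk = bytearray(4096)
    while True:
        read = tls_socket.recv_into(chunk, 4096)
        if read == 0:
            raise EOFError("connection closed by server")
        pending += chunk[:read]
        pos = 0
        while pos < len(pending):
            try:
                msg_len, body_start = _DecodeVarint32(pending, pos)
            except IndexError:
                break  # the length prefix itself is incomplete
            if body_start + msg_len > len(pending):
                break  # the message body has not fully arrived yet
            msg = msg_factory()
            msg.ParseFromString(bytes(pending[body_start:body_start + msg_len]))
            handle_msg(msg)
            pos = body_start + msg_len
        del pending[:pos]  # keep only the unconsumed tail for the next read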
Example #48
0
def load_demonstration(
    file_path: str
) -> Tuple[BrainParameters, List[AgentInfoActionPairProto], int]:
    """
    Loads and parses a demonstration file.
    :param file_path: Location of demonstration file (.demo).
    :return: BrainParameter and list of AgentInfoActionPairProto containing demonstration data.
    """

    # First 32 bytes of file dedicated to meta-data.
    INITIAL_POS = 33
    file_paths = []
    if os.path.isdir(file_path):
        all_files = os.listdir(file_path)
        for _file in all_files:
            if _file.endswith(".demo"):
                file_paths.append(os.path.join(file_path, _file))
        if not file_paths:
            raise ValueError("There are no '.demo' files in the provided directory.")
    elif os.path.isfile(file_path):
        file_paths.append(file_path)
        file_extension = pathlib.Path(file_path).suffix
        if file_extension != ".demo":
            raise ValueError(
                "The file is not a '.demo' file. Please provide a file with the "
                "correct extension."
            )
    else:
        raise FileNotFoundError(
            "The demonstration file or directory {} does not exist.".format(file_path)
        )

    group_spec = None
    brain_param_proto = None
    info_action_pairs = []
    total_expected = 0
    for _file_path in file_paths:
        with open(_file_path, "rb") as fp:
            with hierarchical_timer("read_file"):
                data = fp.read()
            next_pos, pos, obs_decoded = 0, 0, 0
            while pos < len(data):
                next_pos, pos = _DecodeVarint32(data, pos)
                if obs_decoded == 0:
                    meta_data_proto = DemonstrationMetaProto()
                    meta_data_proto.ParseFromString(data[pos : pos + next_pos])
                    total_expected += meta_data_proto.number_steps
                    pos = INITIAL_POS
                if obs_decoded == 1:
                    brain_param_proto = BrainParametersProto()
                    brain_param_proto.ParseFromString(data[pos : pos + next_pos])
                    pos += next_pos
                if obs_decoded > 1:
                    agent_info_action = AgentInfoActionPairProto()
                    agent_info_action.ParseFromString(data[pos : pos + next_pos])
                    if group_spec is None:
                        group_spec = agent_group_spec_from_proto(
                            brain_param_proto, agent_info_action.agent_info
                        )
                    info_action_pairs.append(agent_info_action)
                    if len(info_action_pairs) == total_expected:
                        break
                    pos += next_pos
                obs_decoded += 1
    if not group_spec:
        raise RuntimeError(
            f"No BrainParameters found in demonstration file at {file_path}."
        )
    return group_spec, info_action_pairs, total_expected
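
For reference, a typical call looks like the following; the demo path is purely illustrative.

# Illustrative usage only; the .demo path is hypothetical.
behavior_spec, info_action_pairs, total_expected = load_demonstration("Demos/3DBall.demo")
print("loaded", len(info_action_pairs), "of", total_expected, "expected pairs")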
Example #49
0
def main():
    print('starting')
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        print('socket created, binding to', (HOST, PORT))
        s.bind((HOST, PORT))
        s.listen()
        conn, addr = s.accept()

        # Read the varint length prefix one byte at a time until it is complete.
        var_int_buff = []
        while True:
            buf = conn.recv(1)
            if not buf:
                raise EOFError('connection closed while reading length prefix')
            var_int_buff += buf
            try:
                msg_len, new_pos = _DecodeVarint32(var_int_buff, 0)
                break
            except IndexError:
                continue  # varint not complete yet; read another byte
        whole_message = conn.recv(msg_len)  # note: recv may return fewer than msg_len bytes
        print(whole_message)

        # conn.sendall(data)
        # u_connect = world_ups_pb2.UConnect()
        # u_connect.ParseFromString(whole_message)
        # print(u_connect)

        ua_commands = ups_amazon_pb2.UACommands()
        ua_commands.ParseFromString(whole_message)
        print(ua_commands)

        for init_world in ua_commands.worlds:
            world_socket = connect_world_server()
            worldid, result = connect_world(world_socket, init_world.worldid)

            print(result)
            break

        msg = ups_amazon_pb2.AUCommands()
        warehouse = msg.warehouses.add()
        warehouse.whid = 1
        warehouse.wh_x = 5
        warehouse.wh_y = 6
        warehouse.seqnum = 1
        package = warehouse.packageinfos.add()
        package.description = 'description'
        package.count = 10
        package.packageid = 1
        package.x = 10
        package.y = 20
        package.upsaccount = 'user'

        send_msg(conn, msg)
        while True:
            print('Connected by', addr)
            var_int_buff = []
            while True:
                buf = conn.recv(1)
                if not buf:
                    raise EOFError('connection closed while reading length prefix')
                var_int_buff += buf
                try:
                    msg_len, new_pos = _DecodeVarint32(var_int_buff, 0)
                    break
                except IndexError:
                    continue  # varint not complete yet; read another byte
            whole_message = conn.recv(msg_len)  # note: recv may return fewer than msg_len bytes
            print(whole_message)

            # conn.sendall(data)
            # u_connect = world_ups_pb2.UConnect()
            # u_connect.ParseFromString(whole_message)
            # print(u_connect)

            ua_commands = ups_amazon_pb2.UACommands()
            ua_commands.ParseFromString(whole_message)
            print(ua_commands)

        conn.close()
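
send_msg() is called above but not defined in the snippet; assuming it mirrors the read side (a varint length prefix followed by the serialized message), it might look like this sketch.

from google.protobuf.internal.encoder import _VarintBytes

def send_msg(conn, msg):
    # Assumed helper: length-prefix the serialized protobuf message with a
    # varint so the peer can decode it with _DecodeVarint32.
    payload = msg.SerializeToString()
    conn.sendall(_VarintBytes(len(payload)) + payload)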
Example #50
0
import metric_pb2

from google.protobuf.internal.encoder import _VarintBytes
from google.protobuf.internal.decoder import _DecodeVarint32

with open('out.bin', 'wb') as f:
    for i in range(128):
        my_metric = metric_pb2.Metric()
        my_metric.name = 'sys.cpu'
        my_metric.type = 'gauge'
        my_metric.value = i
        my_metric.tags = str(i) + 'tag'
        size = my_metric.ByteSize()
        f.write(_VarintBytes(size))
        f.write(my_metric.SerializeToString())

with open('out.bin', 'rb') as f:
    buf = f.read()
    n = 0
    while n < len(buf):
        msg_len, new_pos = _DecodeVarint32(buf, n)
        n = new_pos
        msg_buf = buf[n:n + msg_len]
        n += msg_len
        read_metric = metric_pb2.Metric()
        read_metric.ParseFromString(msg_buf)
        # do something with read_metric
        print(read_metric)
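
Reading the whole file at once is fine for 128 small records; for larger files the same framing can be consumed incrementally. The generator below is a sketch under the same assumptions (metric_pb2.Metric messages, each prefixed by a varint length), reusing the imports already shown above.

def read_delimited(stream, message_cls):
    # Sketch: yield length-prefixed messages from a binary file object
    # without loading the whole file into memory.
    while True:
        prefix = bytearray()
        while True:
            byte = stream.read(1)
            if not byte:
                return  # clean end of file
            prefix += byte
            if not (byte[0] & 0x80):
                break  # last varint byte has the continuation bit cleared
        msg_len, _ = _DecodeVarint32(bytes(prefix), 0)
        msg = message_cls()
        msg.ParseFromString(stream.read(msg_len))
        yield msg

with open('out.bin', 'rb') as f:
    for m in read_delimited(f, metric_pb2.Metric):
        print(m.name, m.value)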
Example #51
0
    def parse_metric_family(self, response):
        """
        Parse the MetricFamily from a valid requests.Response object to provide a MetricFamily object (see [0])

        The text format is consumed through the iter_lines() generator.

        The protobuf format directly parses the response.content property, searching for Prometheus messages of type
        MetricFamily [0] delimited by a varint32 [1] when the content-type is `application/vnd.google.protobuf`.

        [0] https://github.com/prometheus/client_model/blob/086fe7ca28bde6cec2acd5223423c1475a362858/metrics.proto#L76-%20%20L81
        [1] https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/AbstractMessageLite#writeDelimitedTo(java.io.OutputStream)

        :param response: requests.Response
        :return: metrics_pb2.MetricFamily()
        """
        if 'application/vnd.google.protobuf' in response.headers['Content-Type']:
            n = 0
            buf = response.content
            while n < len(buf):
                msg_len, new_pos = _DecodeVarint32(buf, n)
                n = new_pos
                msg_buf = buf[n:n+msg_len]
                n += msg_len

                message = metrics_pb2.MetricFamily()
                message.ParseFromString(msg_buf)

                # Lookup type overrides:
                if self.type_overrides and message.name in self.type_overrides:
                    new_type = self.type_overrides[message.name]
                    if new_type in self.METRIC_TYPES:
                        message.type = self.METRIC_TYPES.index(new_type)
                    else:
                        self.log.debug("type override %s for %s is not a valid type name" % (new_type, message.name))
                yield message

        elif 'text/plain' in response.headers['Content-Type']:
            messages = defaultdict(list)  # map with the name of the element (before the labels)
            # and the list of occurrences with labels and values

            obj_map = {}  # map of the types of each metrics
            obj_help = {}  # help for the metrics
            for metric in text_fd_to_metric_families(response.iter_lines(chunk_size=self.REQUESTS_CHUNK_SIZE)):
                metric_name = "%s_bucket" % metric.name if metric.type == "histogram" else metric.name
                metric_type = self.type_overrides.get(metric_name, metric.type)
                if metric_type == "untyped" or metric_type not in self.METRIC_TYPES:
                    continue

                for sample in metric.samples:
                    if (sample[0].endswith("_sum") or sample[0].endswith("_count")) and \
                            metric_type in ["histogram", "summary"]:
                        messages[sample[0]].append({"labels": sample[1], 'value': sample[2]})
                    else:
                        messages[metric_name].append({"labels": sample[1], 'value': sample[2]})

                obj_map[metric.name] = metric_type
                obj_help[metric.name] = metric.documentation

            for _m in obj_map:
                if _m in messages or (obj_map[_m] == 'histogram' and ('{}_bucket'.format(_m) in messages)):
                    yield self._extract_metric_from_map(_m, messages, obj_map, obj_help)
        else:
            raise UnknownFormatError('Unsupported content-type provided: {}'.format(
                response.headers['Content-Type']))
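
Callers usually fetch the endpoint themselves and iterate the generator. A hedged usage sketch follows; the check instance, URL and headers are illustrative and not taken from the original integration.

import requests

# Illustrative only: `check` is assumed to be an instance of the class that
# defines parse_metric_family(); URL and headers are placeholders.
response = requests.get("http://localhost:9090/metrics",
                        headers={"Accept": "text/plain"}, stream=True)
for family in check.parse_metric_family(response):
    print(family.name, family.type)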