Example #1
def pack(src, cgi_type, use_compress=0):
    # Validate the required parameters
    if not Util.cookie or not Util.uin or not Util.sessionKey:
        return b''
    # Compress and encrypt
    len_proto_compressed = len(src)
    if use_compress:
        (body, len_proto_compressed) = Util.compress_and_aes(src, Util.sessionKey)
    else:
        body = Util.aes(src, Util.sessionKey)
    logger.debug("cgi:{},protobuf数据:{}\n加密后数据:{}".format(cgi_type, Util.b2hex(src), Util.b2hex(body)))
    # Build the packet header
    header = bytearray(0)
    header += b'\xbf'                                                                               # flag byte (can be ignored)
    header += bytes([0])                                                                            # low 2 bits: 02 = body not compressed; high 6 bits: header length, filled in at the end
    header += bytes([((0x5 << 4) + 0xf)])                                                           # 0x5: AES encryption; 0xf: cookie length (a 15-byte cookie is used by default)
    header += struct.pack(">I", define.__CLIENT_VERSION__)                                          # client version, network byte order
    header += struct.pack(">i", Util.uin)                                                           # uin
    header += Util.cookie                                                                           # cookie
    header += encoder._VarintBytes(cgi_type)                                                        # cgi type
    header += encoder._VarintBytes(len(src))                                                        # body length before compression
    header += encoder._VarintBytes(len_proto_compressed)                                            # body length after compression
    header += bytes([0]*15)                                                                         # 3 unknown varint parameters, 15 bytes in total
    header[1] = (len(header) << 2) + (1 if use_compress else 2)                                     # header length
    logger.debug("header bytes:{}".format(Util.b2hex(header)))
    # Assemble the packet
    senddata = header + body
    return senddata
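The header above packs two values into the byte at index 1: the header length in its upper six bits and the compression flag in its lower two bits. A minimal sketch of decoding that byte back, assuming the same layout (the function name is illustrative, not part of the original code):

def parse_header_len_byte(header):
    # Inverse of: header[1] = (len(header) << 2) + (1 if use_compress else 2)
    b = header[1]
    header_len = b >> 2        # upper 6 bits: total header length in bytes
    compress_flag = b & 0x03   # lower 2 bits: 1 = compressed, 2 = not compressed
    return header_len, compress_flag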
Example #2
    def getBytes(self, protoPacket = 0):

        requestBytes = self.packet_type
        if protoPacket == 0:
            requestBytes += GoogleProtobufEncoder._VarintBytes(0)
        else:
            protoRequestBytes = protoPacket.SerializeToString()
            requestBytes += GoogleProtobufEncoder._VarintBytes(len(protoRequestBytes))
            requestBytes += protoRequestBytes

        return requestBytes
Example #3
def send(msg):
    string = msg.SerializeToString()
    string = encoder._VarintBytes(len(string)) + string
    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    s.connect("/tmp/slim_socket") 
    s.send(string)
    s.close()
def delimitProtobuf(src):
    """Python protobuf bindings are missing writeDelimited, this is a workaround to add a delimiter"""
    from google.protobuf.internal import encoder

    serializedMessage = src.SerializeToString()
    delimiter = encoder._VarintBytes(len(serializedMessage))

    return delimiter + serializedMessage
def send_over_socket(sock, message):
    delimiter = encoder._VarintBytes(len(message))
    message = delimiter + message
    msg_len = len(message)
    total_sent = 0
    while total_sent < msg_len:
        sent = sock.send(message[total_sent:])
        if sent == 0:
            raise RuntimeError('Socket connection broken')
        total_sent = total_sent + sent
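delimitProtobuf and send_over_socket above only handle the write side. For completeness, a minimal sketch of the matching read side, assuming the whole delimited blob is already in memory; _DecodeVarint32 comes from google.protobuf.internal.decoder, and parseDelimited is an illustrative name, not part of the original code:

from google.protobuf.internal.decoder import _DecodeVarint32

def parseDelimited(buf, message_class):
    # Inverse of delimitProtobuf: read the varint length, then parse that many bytes.
    size, pos = _DecodeVarint32(buf, 0)      # (message length, offset just past the varint)
    msg = message_class()
    msg.ParseFromString(buf[pos:pos + size])
    return msg, pos + size                   # offset where the next delimited message starts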
Example #6
def save_protobuf(filename, feature_trajectories):
    """
    save a features in the given filename
    """
    with open(filename, 'wb') as f:
        for features in feature_trajectories:
            for fea in features:
                serializedMessage = fea.SerializeToString()
                delimiter = encoder._VarintBytes(len(serializedMessage))
                f.write(delimiter + serializedMessage)
Example #7
def pack(mtype, request, rid=None):
    '''pack request to delimited data'''
    envelope = wire.Envelope() 
    if rid is not None:
        envelope.id = rid
    envelope.type = mtype
    envelope.message = request.SerializeToString()
    data = envelope.SerializeToString()
    data = encoder._VarintBytes(len(data)) + data
    return data
  def sendCommand(self, request, cbinfo):
    rpchead = RpcPayloadHeader_pb2.RpcPayloadHeaderProto()

    # From the Hadoop code, some definitions:
    # enum RpcKindProto {
    # RPC_BUILTIN          = 0;  // Used for built in calls by tests
    # RPC_WRITABLE         = 1;  // Use WritableRpcEngine
    # RPC_PROTOCOL_BUFFER  = 2;  // Use ProtobufRpcEngine
    # }
    # enum RpcPayloadOperationProto {
    # RPC_FINAL_PAYLOAD        = 0; // The final payload
    # RPC_CONTINUATION_PAYLOAD = 1; // not implemented yet
    # RPC_CLOSE_CONNECTION     = 2; // close the rpc connection
    # }

    rpchead.rpcKind = 2
    rpchead.rpcOp = 0
    rpchead.callId = self._nextcallid

    self.calls[self._nextcallid] = cbinfo
    self._nextcallid += 1


    rpcreq = hadoop_rpc_pb2.HadoopRpcRequestProto()
    rpcreq.methodName = cbinfo["methodName"]
    if request is None:
      rpcreq.request = ""
    else:
      rpcreq.request =  request.SerializeToString()
    rpcreq.declaringClassProtocolName = "org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol"
    rpcreq.clientProtocolVersion = 1

    headerout = rpchead.SerializeToString()
    headerout = encoder._VarintBytes(len(headerout)) + headerout

    reqout = rpcreq.SerializeToString()
    reqout = encoder._VarintBytes(len(reqout)) + reqout

    buf = headerout + reqout

    self.transport.write(pack(">I", len(buf)))
    self.transport.write(buf)
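The frame written by sendCommand is a 4-byte big-endian total length followed by two varint-delimited protobuf payloads (the RPC header and the wrapped request). A sketch of splitting such a frame back into its two payloads, purely the inverse of the writes above (split_rpc_frame is an illustrative name, not part of the original code):

import struct
from google.protobuf.internal.decoder import _DecodeVarint32

def split_rpc_frame(frame):
    # Undo: write(pack(">I", len(buf))) followed by write(headerout + reqout)
    (total_len,) = struct.unpack(">I", frame[:4])
    buf = frame[4:4 + total_len]
    hdr_len, pos = _DecodeVarint32(buf, 0)
    header_bytes = buf[pos:pos + hdr_len]
    req_len, pos = _DecodeVarint32(buf, pos + hdr_len)
    request_bytes = buf[pos:pos + req_len]
    return header_bytes, request_bytes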
Example #9
def kafka_produce_protobuf_messages(topic, start_index, num_messages):
    data = b''
    for i in range(start_index, start_index + num_messages):
        msg = kafka_pb2.KeyValuePair()
        msg.key = i
        msg.value = str(i)
        serialized_msg = msg.SerializeToString()
        data = data + _VarintBytes(len(serialized_msg)) + serialized_msg
    producer = KafkaProducer(bootstrap_servers="localhost:9092")
    producer.send(topic=topic, value=data)
    producer.flush()
    print("Produced {} messages for topic {}".format(num_messages, topic))
Example #10
    def create_rpc_header(self):
        '''Creates and serializes a delimited RpcPayloadHeaderProto message.'''
        rpcheader = rpcheaderproto.RpcPayloadHeaderProto()
        rpcheader.rpcKind = 2  # rpcheaderproto.RpcKindProto.Value('RPC_PROTOCOL_BUFFER')
        rpcheader.rpcOp = 0  # rpcheaderproto.RpcPayloadOperationProto.Value('RPC_FINAL_PAYLOAD')
        rpcheader.callId = self.call_id
        self.call_id += 1

        # Serialize delimited
        s_rpcHeader = rpcheader.SerializeToString()
        log_protobuf_message("RpcPayloadHeader (len: %d)" % (len(s_rpcHeader)), rpcheader)
        return encoder._VarintBytes(len(s_rpcHeader)) + s_rpcHeader
Example #11
    def create_rpc_request(self, method, request):
        '''Wraps the user's request in an HadoopRpcRequestProto message and serializes it delimited.'''
        s_request = request.SerializeToString()
        log_protobuf_message("Protobuf message", request)
        log.debug("Protobuf message bytes (%d): %s" % (len(s_request), format_bytes(s_request)))
        rpcRequest = hadoop_rpc.HadoopRpcRequestProto()
        rpcRequest.methodName = method.name
        rpcRequest.request = s_request
        rpcRequest.declaringClassProtocolName = "org.apache.hadoop.hdfs.protocol.ClientProtocol"
        rpcRequest.clientProtocolVersion = 1L

        # Serialize delimited
        s_rpcRequest = rpcRequest.SerializeToString()
        log_protobuf_message("RpcRequest (len: %d)" % len(s_rpcRequest), rpcRequest)
        return encoder._VarintBytes(len(s_rpcRequest)) + s_rpcRequest
Example #12
    def dumMsgToFileProto(self, message):
        """
        

        Parameters
        ----------
        message : TYPE
            DESCRIPTION.

        Returns
        -------
        None.

        """
        size = message.ByteSize()
        self.DumpfileProto.write(_VarintBytes(size))
        self.DumpfileProto.write(message.SerializeToString())
Example #13
    def __dumpMsgToFileProto(self, message):
        """
        Private function to dump a message as a binary blob for a new data packet.

        Parameters
        ----------
        message : protobuff message
            Data to be dumped.

        Returns
        -------
        None.

        """
        size = message.ByteSize()
        self.DumpfileProto.write(_VarintBytes(size))
        self.DumpfileProto.write(message.SerializeToString())
Example #14
    def create_rpc_request(self, method, request):
        '''Wraps the user's request in an HadoopRpcRequestProto message and serializes it delimited.'''
        s_request = request.SerializeToString()
        log_protobuf_message("Protobuf message", request)
        log.debug("Protobuf message bytes (%d): %s" %
                  (len(s_request), format_bytes(s_request)))
        rpcRequest = hadoop_rpc.HadoopRpcRequestProto()
        rpcRequest.methodName = method.name
        rpcRequest.request = s_request
        rpcRequest.declaringClassProtocolName = "org.apache.hadoop.hdfs.protocol.ClientProtocol"
        rpcRequest.clientProtocolVersion = 1L

        # Serialize delimited
        s_rpcRequest = rpcRequest.SerializeToString()
        log_protobuf_message("RpcRequest (len: %d)" % len(s_rpcRequest),
                             rpcRequest)
        return encoder._VarintBytes(len(s_rpcRequest)) + s_rpcRequest
Example #15
def orderHistory(request):
    orders = purchase.objects.filter(user=request.user.id)
    for order in orders:
        command = amazon.Cmd()
        query_status = amazon.Query()
        query_status.packageid = order.id
        command.queries.append(query_status)
        serialized_request = command.SerializeToString()
        size = command.ByteSize()
        web_socket.sendall(_VarintBytes(size) + serialized_request)
        # response = recv_from_back(amazon.Resp, web_socket)
        # order.status = response.pkgstatus.status
        # order.save()
    time.sleep(4)
    ordersAll = purchase.objects.filter(user=request.user.id)
    context = {'historis': ordersAll}
    return render(request, 'purchase/orderHistory.html', context)
Example #16
 def to_buffer(self):
     # Each message_info as part of the header needs to be updated
     # so that its length matches the object contained within.
     for obj, message_info in zip(self.objects, self.header.message_infos):
         try:
             object_length = len(obj.SerializeToString())
             provided_length = message_info.length
             if object_length != provided_length:
                 message_info.length = object_length
         except EncodeError as e:
             raise ValueError(
                 "Failed to encode object: %s\nObject: '%s'\nMessage info: %s"
                 % (e, repr(obj), message_info))
     return b''.join([
         _VarintBytes(self.header.ByteSize()),
         self.header.SerializeToString()
     ] + [obj.SerializeToString() for obj in self.objects])
Example #17
def single(msgtype, stream=False):
    if msgtype == 'cat':
        obj = cat_pb2.Cat(is_lazy=bool(random.getrandbits(1)))
    elif msgtype == 'dog':
        obj = dog_pb2.Dog(age=random.choice(range(0, 20)),
                          breed=['rottweiler', 'gsd',
                                 'poodle'][random.choice(range(0, 3))],
                          temperament=['chill', 'aggressive',
                                       'excited'][random.choice(range(0, 3))])
    elif msgtype == 'person':
        obj = person_pb2.Person(id=random.choice(range(0, 4)),
                                name=['raffi', 'khosrov',
                                      'vahaken'][random.choice(range(0, 3))])
    else:
        usage()
    obj = obj.SerializeToString()
    varint_ = encoder._VarintBytes(len(obj)) if stream else b''
    sys.stdout.buffer.write(varint_ + obj)
Example #18
    def sendRpcMessage(self, sock, rpcRequest):
        '''Send an RPC request to the server.'''
        try:
            wfile = sock.makefile('w')
            
            """ modified by wuzhw """
            out = rpcRequest.SerializeToString()
            # print("request size: %d"%len(out))
            out = encoder._VarintBytes(len(out))+out
            wfile.write(out)

            """ Original: """
            # wfile.write(rpcRequest.SerializeToString())
            wfile.flush()
            sock.shutdown(socket.SHUT_WR)
        except socket.error:
            self.closeSocket(sock)
            raise error.IOError("Error writing data to server")
    def serialize_delimited(self) -> bytes:
        """
        Write out in delimited format (data is prefixed with the length of the
        datastream).

        This is useful when you are streaming multiple dataset profile objects

        Returns
        -------
        data : bytes
            A sequence of bytes
        """
        with io.BytesIO() as f:
            protobuf: DatasetProfileMessage = self.to_protobuf()
            size = protobuf.ByteSize()
            f.write(_VarintBytes(size))
            f.write(protobuf.SerializeToString(deterministic=True))
            return f.getvalue()
Example #20
    def __gen_datum(self, key: int, as_text=False):
        encoded_key = encode_key(key)
        record = ''.join(np.random.choice(ALPHABET, size=self.record_size))
        master = key % self.num_replicas

        if as_text:
            datum_tuple = map(str, (encoded_key.decode(), record, master))
            datum = ','.join(datum_tuple) + '\n'
        else:
            datum_proto = Datum()
            datum_proto.key = encoded_key.decode()
            datum_proto.record = record
            datum_proto.master = master
            # Size of the datum and the serialized datum
            datum = (_VarintBytes(datum_proto.ByteSize()) +
                     datum_proto.SerializeToString())

        return datum
Example #21
def single(msgtype, stream=False):
    if msgtype == "cat":
        obj = cat_pb2.Cat(is_lazy=bool(random.getrandbits(1)))
    elif msgtype == "dog":
        obj = dog_pb2.Dog(
            age=random.choice(range(0, 20)),
            breed=["rottweiler", "gsd", "poodle"][random.choice(range(0, 3))],
            temperament=["chill", "aggressive", "excited"][random.choice(range(0, 3))],
        )
    elif msgtype == "person":
        obj = person_pb2.Person(
            id=random.choice(range(0, 4)),
            name=["raffi", "khosrov", "vahaken"][random.choice(range(0, 3))],
        )
    else:
        usage()
    obj = obj.SerializeToString()
    varint_ = encoder._VarintBytes(len(obj)) if stream else b""
    sys.stdout.buffer.write(varint_ + obj)
Example #22
def save_grpc_requests(requests: List[types.InferenceRequest]):
    infer_requests = [
        converters.ModelInferRequestConverter.from_types(
            req, model_name=MODEL_NAME, model_version=MODEL_VERSION
        )
        for req in requests
    ]

    requests_file_path = os.path.join(DATA_PATH, "grpc-requests.pb")
    with open(requests_file_path, "wb") as requests_file:
        for req in infer_requests:
            # To stream multiple messages we need to prefix each one with its
            # size
            # https://ghz.sh/docs/options#-b---binary
            size = req.ByteSize()
            size_varint = _VarintBytes(size)
            requests_file.write(size_varint)

            serialised = req.SerializeToString()
            requests_file.write(serialised)
Example #23
    def __gen_data_per_partition(
        self,
        partition: int,
        keys: list,
        as_text: bool,
    ) -> None:
        # Set per-partition seed so that partitions have
        # different data. Keys and master are not randomly
        # generated so this seed only affects records
        np.random.seed(partition)

        file_name = os.path.join(self.data_dir,
                                 self.prefix + str(partition) + FILE_EXTENSION)
        LOG.info("Generating data for %s", file_name)
        mode = 'w' if as_text else 'wb'
        part_file = open(file_name, mode)

        # Write number of keys in this partition
        if as_text:
            part_file.write(str(len(keys)) + "\n")
        else:
            part_file.write(_VarintBytes(len(keys)))

        last_time = time.time()
        last_index = 0
        for i, key in enumerate(keys):
            # Generate the datum for this key
            datum = self.__gen_datum(key, as_text)
            # Append the datum to file
            part_file.write(datum)

            now = time.time()
            if now - last_time >= LOG_EVERY_SEC:
                pct = (i) / len(keys) * 100
                rate = (i - last_index) / LOG_EVERY_SEC
                LOG.info("Progress: %d/%d (%.1f%%). Rate: %d datums/s", i + 1,
                         len(keys), pct, rate)
                last_time = now
                last_index = i

        part_file.close()
Example #24
    def serializeDelimited(self):

        # create outer packet message
        packetMessage = protoPackets_pb2.PacketMessage()
        packetMessage.type = protoPackets_pb2.PacketMessage.CHECK
        packetMessage.transactionID = self.transactionID

        # add check-specific content
        packetMessage.checkMessage.newBalance = self.newBalance
        
        for (transactionID, amount) in self.balanceUpdates:
            x = packetMessage.checkMessage.balanceUpdates.add()
            x.transactionID = transactionID
            x.amount = amount 

        print self

        serializedMessage = packetMessage.SerializeToString()
        delimiter = encoder._VarintBytes(len(serializedMessage))

        return delimiter + serializedMessage       
Example #25
    def serializeDelimited(self):

        # create outer packet message
        packetMessage = protoPackets_pb2.PacketMessage()
        packetMessage.type = protoPackets_pb2.PacketMessage.CHECK
        packetMessage.transactionID = self.transactionID

        # add check-specific content
        packetMessage.checkMessage.newBalance = self.newBalance

        for (transactionID, amount) in self.balanceUpdates:
            x = packetMessage.checkMessage.balanceUpdates.add()
            x.transactionID = transactionID
            x.amount = amount

        print self

        serializedMessage = packetMessage.SerializeToString()
        delimiter = encoder._VarintBytes(len(serializedMessage))

        return delimiter + serializedMessage
    def write_protobuf(self, protobuf_path: str, delimited_file: bool = True):
        """
        Write the dataset profile to disk in binary format

        Parameters
        ----------
        protobuf_path : str
            local path or any path supported supported by smart_open:
            https://github.com/RaRe-Technologies/smart_open#how.
            The parent directory must already exist
        delimited_file : bool, optional
            whether to prefix the data with the length of output or not.
            Default is True

        """
        with open(protobuf_path, "wb") as f:
            msg = self.to_protobuf()
            size = msg.ByteSize()
            if delimited_file:
                f.write(_VarintBytes(size))
            f.write(msg.SerializeToString())
    def __sendDataMsg(self):
        """
        Sends out simulated data.

        Returns
        -------
        None.

        """
        tmptime = time.time()
        secs = int(np.floor(tmptime))
        nsecs = int((tmptime - secs) * 1e9)
        # setting up the proto message
        protodata = messages_pb2.DataMessage()
        protodata.id = self.params["ID"]
        protodata.sample_number = self.packetssend
        protodata.unix_time = secs
        protodata.unix_time_nsecs = nsecs
        protodata.time_uncertainty = 1000000
        res = 2**self.params["resolutionbit"]
        tmp = (nsecs / 1e9) * (res - 1) - res / 2
        quantizedint = int(tmp)
        quantizedfloat = quantizedint / res * 2
        protodata.Data_01 = np.sin(quantizedfloat * np.pi)
        protodata.Data_02 = np.cos(quantizedfloat * np.pi)
        protodata.Data_03 = quantizedfloat
        protodata.Data_04 = abs(quantizedfloat)
        binproto = protodata.SerializeToString()
        # add DATA preamble for the data packet
        binarymessage = b"DATA"
        binarymessage = binarymessage + _VarintBytes(len(binproto)) + binproto
        self.socket.sendto(binarymessage,
                           (self.params["TargetIp"], self.params["Port"]))
        self.packetssend = self.packetssend + 1
        if self.packetssend % self.params["UpdateRateHz"] == 0:
            print(str(self.packetssend) + " Packets sent")
        return
Example #28
 def activate(self):
     retry = None
     while True:
         with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as conn:
             conn.connect((self.hostname, self.port))
             conn.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
             conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 1)
             conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 5)
             conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
             while True:
                 if retry is None:
                     cmd = self.command_queue.get()
                     to_send = ClusterCommand()
                     to_send.type, to_send.data = cmd
                 else:
                     to_send = retry
                 size = to_send.ByteSize()
                 try:
                     conn.send(
                         _VarintBytes(size) + to_send.SerializeToString())
                     retry = None
                 except BrokenPipeError:
                     retry = to_send
                     break
Example #29
    def __sendDataMsg(self, line):
        """
        Sends out data.

        Returns
        -------
        None.

        """
        # setting up the proto message
        protodata = messages_pb2.DataMessage()
        protodata.id = int(line[0])
        protodata.sample_number = int(line[1])
        protodata.unix_time = int(line[2])
        protodata.unix_time_nsecs = int(line[3])
        protodata.time_uncertainty = int(line[4])
        for i in range(16):
            protodata.__setattr__(self.dataFieldNames[i], float(line[5 + i]))
        binproto = protodata.SerializeToString()
        # add DATA preamble for the data packet
        binarymessage = b"DATA"
        binarymessage = binarymessage + _VarintBytes(len(binproto)) + binproto
        self.socket.sendto(binarymessage,
                           (self.params["TargetIp"], self.params["Port"]))
Example #30
 def encode_delimited(cls, x, typ):
     """ Encode a message or value with size information
         (for use in a delimited communication stream) """
     data = cls.encode(x, typ)
     delimiter = protobuf_encoder._VarintBytes(len(data))
     return delimiter + data
Example #31
def delimited_value(msg):
    # Emits a writeDelimited compatible Protobuf message
    o = msg.SerializeToString()
    d = encoder._VarintBytes(len(o))
    return d + o
def getInfo(protocolName,methodName,requestProto,responseProto):
	rpcrequestheader = RpcRequestHeaderProto()
	rpcrequestheader.rpcKind = 2
	#RpcHeader_pb2.RPC_PROTOCOL_BUFFER
	rpcrequestheader.rpcOp = 0
	#RpcRequestHeaderProto.RPC_FINAL_PACKET
	rpcrequestheader.callId = -3 # During initial connection
	# 0 otherwise
	# 4 for ping
	client_id = str(uuid.uuid4())
	rpcrequestheader.clientId = client_id[0:16]
	s_rpcrequestheader = rpcrequestheader.SerializeToString()

	'''Create and serialize a IpcConnectionContextProto '''
	context = IpcConnectionContextProto()
	context.userInfo.effectiveUser = "******"
	
	context.protocol = protocolName
	
	#"org.apache.hadoop.mapreduce.v2.api.MRClientProtocolPB" 
	
	s_context = context.SerializeToString()

	''' Length of the two messages '''
	rpcipc_length = len(s_rpcrequestheader) + encoder._VarintSize(len(s_rpcrequestheader)) + len(s_context) + encoder._VarintSize(len(s_context))

	 
	''' Send to server in the order given above'''
	sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
	sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
	sock.settimeout(10)
	sock.connect(("localhost", 8032)) #8020 for name node, 8032 for yarn 

	sock.send("hrpc") # header
	sock.send(struct.pack('B', 9)) # version
	sock.send(struct.pack('B', 0x00)) # RPC service class
	sock.send(struct.pack('B', 0x00)) # auth none


	sock.sendall(struct.pack('!I', rpcipc_length) + 
	encoder._VarintBytes(len(s_rpcrequestheader)) + 
	s_rpcrequestheader + 
	encoder._VarintBytes(len(s_context)) + 
	s_context)


	'''
	The Hadoop RPC request frame looks like this:
	+---------------------------------------------------------------------+
	|  Length of the next three parts (4 bytes/32 bit int)                |
	+---------------------------------------------------------------------+
	|  Delimited serialized RpcRequestHeaderProto (varint len + header)   |
	+---------------------------------------------------------------------+
	|  Delimited serialized RequestHeaderProto (varint len + header)      |
	+---------------------------------------------------------------------+
	|  Delimited serialized Request (varint len + request)                |
	+---------------------------------------------------------------------+
	'''


	'''
	Steps:
	1. create the rpcrequestheader
	2. create the requestheader, which names the protocol and the method to call
	3. create the actual request, which carries the method parameters
	'''


	''' we need a rpcrequestheaderproto for every message we send ''' 
	rpcrequestheader = RpcRequestHeaderProto()
	rpcrequestheader.rpcKind = 2 #RpcHeader_pb2.RPC_PROTOCOL_BUFFER
	rpcrequestheader.rpcOp = 0   #RpcRequestHeaderProto.RPC_FINAL_PACKET
	rpcrequestheader.callId = 0  # 0 for all communication after the initial connection, 4 for ping
	client_id = str(uuid.uuid4())
	rpcrequestheader.clientId = client_id[0:16]
	s_rpcrequestheader = rpcrequestheader.SerializeToString()
	''' that's our header '''

	'''let's create our requestheaderproto '''
	requestheader = RequestHeaderProto()
	requestheader.methodName = methodName 
	#"getClusterNodes"
	
	requestheader.declaringClassProtocolName = protocolName
	#"org.apache.hadoop.yarn.api.ApplicationClientProtocolPB"
	requestheader.clientProtocolVersion= 1 
	'''serialize this ''' 
	s_requestheader = requestheader.SerializeToString()

	'''Now build the actual request'''
	 
	#request = protoFile.GetClusterMetricsRequestProto()
	s_request = requestProto.SerializeToString()


	''' length of the three messages '''
	rpc_message_length = len(s_rpcrequestheader) + encoder._VarintSize(len(s_rpcrequestheader)) + \
		                     len(s_requestheader) + encoder._VarintSize(len(s_requestheader)) + \
		                     len(s_request) + encoder._VarintSize(len(s_request))

	'''pack in the format given above and send'''
	sock.sendall(struct.pack('!I', rpc_message_length) + 
	encoder._VarintBytes(len(s_rpcrequestheader)) + 
	s_rpcrequestheader + 
	encoder._VarintBytes(len(s_requestheader))+
	s_requestheader+
	encoder._VarintBytes(len(s_request)) + 
	s_request)
	
	#responseObject = yarn_service_protos_pb2.GetClusterMetricsResponseProto
	
	stream = recv_rpc_message(sock)
	parse_response(stream, responseProto)
        spot.z = float(i3_block['zc'][i])

        spot.intensity = float(i3_block['a'][i])
        spot.background = float(i3_block['bg'][i])
        spot.width = float(i3_block['w'][i])
        spot.a = float(i3_block['ax'][i])
        spot.theta = 0.0

        spot.x_original = float(i3_block['x'][i])*pix_to_nm
        spot.y_original = float(i3_block['y'][i])*pix_to_nm
        spot.z_original = float(i3_block['z'][i])

        localization_number += 1

        out = spot.SerializeToString()
        out = encoder._VarintBytes(len(out)) + out
        tsf_file.write(out)

    i3_block = i3_reader.nextBlock(block_size = 1000, good_only = False)

print("")
print(localization_number, "total localizations")
# Save SpotList.
print("")
print("Saving analysis meta-data")
print(" data set contains", len(channels), "channels", channels)
spot_list = TSFProto_pb2.SpotList()

# FIXME: get a real id..
spot_list.application_id = 1
spot_list.name = os.path.basename(sys.argv[1])
Example #34
      runCommand("heightOn")

      runCommand("i2cOn")

      ok = runTest("i2cReady " + str(SERIAL_ADDRESS), "ok", "Serial number not ready. I2C lines probably hosed.") and ok
      ok = runTest("i2cReady " + str(MEMORY_ADDRESS), "ok", "Memory address not ready. I2C lines probably hosed.") and ok

      serial_number = i2cRead(SERIAL_ADDRESS, SERIAL_PAGE, 16)
      if serial_number not in ["error", "timeout", "busy"]:
        serial_number = base64.urlsafe_b64encode(binascii.unhexlify(serial_number.replace(" ", ""))).strip("=")

        # Set manufacturing info into the board info.
        board_info.manufacturing_info.test_time = long(startTime)
        serialized_board_info = board_info.SerializeToString()
        delimiter = encoder._VarintBytes(len(serialized_board_info))

        ok = i2cWrite(MEMORY_ADDRESS + 1, MEMORY_PAGE, delimiter + serialized_board_info) and ok
      else:
        logError("Unable to read EEPROM serial number!")
        ok = False

      runCommand("i2cOff")

      runCommand("heightOff")
    else:
      # Wait for bootup.
      time.sleep(0.5)
      testBin = "builds/" + board + "/test.bin"
      returncode = flash(testBin)
      if returncode != 0:
Example #35
 def write_delimited(self, data):
     self.write(encoder._VarintBytes(len(data)))
     self.write(data)
Example #36
def write(p, st):
    sm = p.SerializeToString()
    l = len(sm)
    s = encoder._VarintBytes(len(sm))
    st.write(s + sm)
def writeDelimitedTo(message, connection):
    message_str = message.SerializeToString()
    delimiter = encoder._VarintBytes(len(message_str))
    connection.send(delimiter + message_str)
Example #38
    def readBlock(self, length, pool_id, block_id, generation_stamp, offset, check_crc):
        '''Send a read request to given block. If we receive a successful response,
        we start reading packets.

        Send read request:
        +---------------------------------------------------------------------+
        |  Data Transfer Protocol Version, 2 bytes                            |
        +---------------------------------------------------------------------+
        |  Op code, 1 byte (READ_BLOCK = 81)                                  |
        +---------------------------------------------------------------------+
        |  Delimited serialized OpReadBlockProto (varint len + request)       |
        +---------------------------------------------------------------------+

        Receive response:
        +---------------------------------------------------------------------+
        |  Delimited BlockOpResponseProto (varint len + response)             |
        +---------------------------------------------------------------------+

        Start reading packets. Each packet has the following structure:
        +---------------------------------------------------------------------+
        |  Packet length (4 bytes/32 bit int)                                 |
        +---------------------------------------------------------------------+
        |  Serialized size of header, 2 bytes                                 |
        +---------------------------------------------------------------------+
        |  Packet Header Proto                                                |
        +---------------------------------------------------------------------+
        |  x checksums, 4 bytes each                                          |
        +---------------------------------------------------------------------+
        |  x chunks of payload data                                           |
        +---------------------------------------------------------------------+

        '''
        log.debug("%s sending readBlock request" % self)

        # Send version and opcode
        self.sock.send(struct.pack('>h', 28))
        self.sock.send(struct.pack('b', self.READ_BLOCK))
        length = length - offset

        # Create and send OpReadBlockProto message
        request = datatransfer_proto.OpReadBlockProto()
        request.offset = offset
        request.len = length
        header = request.header
        header.clientName = "snakebite"
        base_header = header.baseHeader
        block = base_header.block
        block.poolId = pool_id
        block.blockId = block_id
        block.generationStamp = generation_stamp
        s_request = request.SerializeToString()
        log_protobuf_message("OpReadBlockProto:", request)
        delimited_request = encoder._VarintBytes(len(s_request)) + s_request
        self.sock.send(delimited_request)

        byte_stream = RpcBufferedReader(self.sock)
        block_op_response_bytes = get_delimited_message_bytes(byte_stream)

        block_op_response = datatransfer_proto.BlockOpResponseProto()
        block_op_response.ParseFromString(block_op_response_bytes)
        log_protobuf_message("BlockOpResponseProto", block_op_response)

        checksum_type = block_op_response.readOpChecksumInfo.checksum.type
        bytes_per_chunk = block_op_response.readOpChecksumInfo.checksum.bytesPerChecksum
        log.debug("Checksum type: %s, bytesPerChecksum: %s" % (checksum_type, bytes_per_chunk))
        if checksum_type in [self.CHECKSUM_CRC32C, self.CHECKSUM_CRC32]:
            checksum_len = 4
        else:
            raise Exception("Checksum type %s not implemented" % checksum_type)

        total_read = 0
        if block_op_response.status == 0:  # datatransfer_proto.Status.Value('SUCCESS')
            while total_read < length:
                log.debug("== Reading next packet")

                packet_len = struct.unpack("!I", byte_stream.read(4))[0]
                log.debug("Packet length: %s", packet_len)

                serialized_size = struct.unpack("!H", byte_stream.read(2))[0]
                log.debug("Serialized size: %s", serialized_size)

                packet_header_bytes = byte_stream.read(serialized_size)
                packet_header = datatransfer_proto.PacketHeaderProto()
                packet_header.ParseFromString(packet_header_bytes)
                log_protobuf_message("PacketHeaderProto", packet_header)

                data_len = packet_header.dataLen

                chunks_per_packet = int((data_len + bytes_per_chunk - 1) / bytes_per_chunk)
                log.debug("Nr of chunks: %d", chunks_per_packet)

                data_len = packet_len - 4 - chunks_per_packet * checksum_len
                log.debug("Payload len: %d", data_len)

                byte_stream.reset()

                # Collect checksums
                if check_crc:
                    checksums = []
                    for _ in xrange(0, chunks_per_packet):
                        checksum = self._read_bytes(checksum_len)
                        checksum = struct.unpack("!I", checksum)[0]
                        checksums.append(checksum)
                else:
                    self._read_bytes(checksum_len * chunks_per_packet)

                # We use a fixed size buffer (a "load") to read only a couple of chunks at once. 
                bytes_per_load = self.LOAD_SIZE - (self.LOAD_SIZE % bytes_per_chunk)
                chunks_per_load = int(bytes_per_load / bytes_per_chunk)
                loads_per_packet = int(math.ceil(bytes_per_chunk * chunks_per_packet / bytes_per_load))

                read_on_packet = 0
                for i in range(loads_per_packet):
                    load = ''
                    for j in range(chunks_per_load):
                        log.debug("Reading chunk %s in load %s:", j, i)
                        bytes_to_read = min(bytes_per_chunk, data_len - read_on_packet)
                        chunk = self._read_bytes(bytes_to_read)
                        if check_crc:
                            checksum_index = i * chunks_per_load + j
                            if checksum_index < len(checksums) and crc(chunk) != checksums[checksum_index]:
                                raise Exception("Checksum doesn't match")
                        load += chunk
                        total_read += len(chunk)
                        read_on_packet += len(chunk)
                    yield load
           
            # Send ClientReadStatusProto message confirming successful read
            request = datatransfer_proto.ClientReadStatusProto()
            request.status = 0  # SUCCESS
            s_request = request.SerializeToString()
            log_protobuf_message("ClientReadStatusProto:", request)
            delimited_request = encoder._VarintBytes(len(s_request)) + s_request
            self.sock.send(delimited_request)
            self._close_socket()
def createMsg():
	
	rpcrequestheader = RpcRequestHeaderProto()
	rpcrequestheader.rpcKind = 2
	#RpcHeader_pb2.RPC_PROTOCOL_BUFFER
	rpcrequestheader.rpcOp = 0
	#RpcRequestHeaderProto.RPC_FINAL_PACKET
	rpcrequestheader.callId = -3 # During initial connection
	# 0 otherwise
	# 4 for ping
	client_id = str(uuid.uuid4())
	rpcrequestheader.clientId = client_id[0:16]
	s_rpcrequestheader = rpcrequestheader.SerializeToString()

	'''Create and serialize a IpcConnectionContextProto '''
	context = IpcConnectionContextProto()
	context.userInfo.effectiveUser = "******"
	context.protocol = "org.apache.hadoop.hdfs.protocol.ClientProtocol"  #"org.apache.hadoop.yarn.protocol.MRClientProtocol" #"org.apache.hadoop.mapred.JobSubmissionProtocol"
	
	s_context = context.SerializeToString()

	''' Length of the two messages '''
	rpcipc_length = len(s_rpcrequestheader) + encoder._VarintSize(len(s_rpcrequestheader)) + len(s_context) + encoder._VarintSize(len(s_context))

	 
	''' Send to server in the order given above'''
	sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
	sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
	sock.settimeout(10)
	sock.connect(("localhost", 8020))

	sock.send("hrpc") # header
	sock.send(struct.pack('B', 9)) # version
	sock.send(struct.pack('B', 0x00)) # RPC service class
	sock.send(struct.pack('B', 0x00)) # auth none


	sock.sendall(struct.pack('!I', rpcipc_length) + 
	encoder._VarintBytes(len(s_rpcrequestheader)) + 
	s_rpcrequestheader + 
	encoder._VarintBytes(len(s_context)) + 
	s_context)


	'''
	The Hadoop RPC request frame looks like this:
	+---------------------------------------------------------------------+
	|  Length of the next three parts (4 bytes/32 bit int)                |
	+---------------------------------------------------------------------+
	|  Delimited serialized RpcRequestHeaderProto (varint len + header)   |
	+---------------------------------------------------------------------+
	|  Delimited serialized RequestHeaderProto (varint len + header)      |
	+---------------------------------------------------------------------+
	|  Delimited serialized Request (varint len + request)                |
	+---------------------------------------------------------------------+
	'''


	'''
	Steps:
	1. create the rpcrequestheader
	2. create the requestheader, which names the protocol and the method to call
	3. create the actual request, which carries the method parameters
	'''


	''' we need a rpcrequestheaderproto for every message we send ''' 
	rpcrequestheader = RpcRequestHeaderProto()
	rpcrequestheader.rpcKind = 2 #RpcHeader_pb2.RPC_PROTOCOL_BUFFER
	rpcrequestheader.rpcOp = 0   #RpcRequestHeaderProto.RPC_FINAL_PACKET
	rpcrequestheader.callId = 0  # 0 for all communication after the initial connection, 4 for ping
	client_id = str(uuid.uuid4())
	rpcrequestheader.clientId = client_id[0:16]
	s_rpcrequestheader = rpcrequestheader.SerializeToString()
	''' that's our header '''

	'''let's create our requestheaderproto '''
	requestheader = RequestHeaderProto()
	requestheader.methodName = "getServerDefaults" #"getDiagnostics" # "getCounters" #"getJobReport"
	#"GetFsStatusRequestProto " # #"getAllJobs" #GetFsStatusRequestProto 
	requestheader.declaringClassProtocolName = "org.apache.hadoop.hdfs.protocol.ClientProtocol" #"org.apache.hadoop.yarn.protocol.MRClientProtocol"
		#"org.apache.hadoop.hdfs.protocol.ClientProtocol" 
		#"org.apache.hadoop.mapred.JobSubmissionProtocol" 
		# org.apache.hadoop.hdfs.protocol.ClientProtocol
	
	requestheader.clientProtocolVersion= 1  # not sure what is this 2,28 
	'''serialize this ''' 
	s_requestheader = requestheader.SerializeToString()


	'''Now build the actual request'''
	#request = mr_service_protos_pb2.GetJobReportRequestProto()
	request = ClientNamenodeProtocol_pb2.GetServerDefaultsRequestProto() 
	s_request = request.SerializeToString() # empty request


	''' length of the three messages '''
	rpc_message_length = len(s_rpcrequestheader) + encoder._VarintSize(len(s_rpcrequestheader)) + \
		                     len(s_requestheader) + encoder._VarintSize(len(s_requestheader)) + \
		                     len(s_request) + encoder._VarintSize(len(s_request))

	'''pack in the format given above and send'''
	sock.sendall(struct.pack('!I', rpc_message_length) + 
	encoder._VarintBytes(len(s_rpcrequestheader)) + 
	s_rpcrequestheader + 
	encoder._VarintBytes(len(s_requestheader))+
	s_requestheader+
	encoder._VarintBytes(len(s_request)) + 
	s_request)
	
	#responseObject = mr_service_protos_pb2.GetJobReportResponseProto()
	responseObject = ClientNamenodeProtocol_pb2.GetServerDefaultsResponseProto() 
	
	#get_message(sock,responseObject)
	stream = recv_rpc_message(sock)
	parse_response(stream, ClientNamenodeProtocol_pb2.GetServerDefaultsResponseProto) #response proto here  
	print "reading response"
Example #40
    def handle_connection(self):
        """
        Run connection handling routine.
        """

        context = SSLContext(PROTOCOL_TLS_CLIENT)
        context.check_hostname = False
        context.verify_mode = CERT_NONE

        with create_connection(
            (self.server_host, self.server_port)) as self.socket:
            with context.wrap_socket(
                    self.socket, server_hostname=self.server_host) as self.tls:

                # Use this buffer for all reads
                read_buffer = bytearray(4096)

                # Perform SID handshake
                rq = RQ_Cvid()
                rq.uuid = os.environ["S7S_UUID"]
                rq.instance = InstanceType.CLIENT
                rq.instance_flavor = InstanceFlavor.CLIENT_BRIGHTSTONE

                msg = MSG()
                setattr(msg, "payload", rq.SerializeToString())
                setattr(msg, "id", randint(0, 65535))

                self.tls.write(_VarintBytes(msg.ByteSize()))
                self.tls.sendall(msg.SerializeToString())

                read = self.tls.recv_into(read_buffer, 4096)
                if read == 0:
                    raise EOFError("")

                msg_len, msg_start = _DecodeVarint32(read_buffer, 0)

                msg = MSG()
                msg.ParseFromString(read_buffer[msg_start:msg_start + msg_len])

                rs = RS_Cvid()
                rs.ParseFromString(msg.payload)

                self.server_cvid = rs.server_cvid
                self.sid = rs.sid

                # The connection is now connected
                with self.connection_state_cv:
                    self.connection_state = ConnectionState.CONNECTED
                    self.connection_state_cv.notify()

                # Begin accepting messages
                while True:

                    # TODO there may be another message in the read buffer

                    # Read from the socket
                    read = self.tls.recv_into(read_buffer, 4096)
                    if read == 0:
                        raise EOFError("")

                    n = 0
                    while n < read:
                        msg_len, n = _DecodeVarint32(read_buffer, n)

                        msg = MSG()
                        msg.ParseFromString(read_buffer[n:n + msg_len])

                        # Place message in response map
                        with self.response_map_cv:
                            self.response_map[msg.id] = msg
                            self.response_map_cv.notify()

        # The connection is now closed
        with self.connection_state_cv:
            self.connection_state = ConnectionState.CLOSED
            self.connection_state_cv.notify()
    print '\n' + "size: "+str(size)+" position: "+str(position) + '\n'
    
    if size == 4294967295: # this is a hack; we probably need fixed-length types for the length
        break
    
    # 1) Reading the keyValue pair from the socket
    toRead = size+position-READING_BYTES
    buf += inSock.recv(toRead) # this is probably inefficient because the buffer size changes all the time
    print("bufSize "+str(len(buf)))
    kv = keyValue_pb2.KeyValuePair()
    kv.ParseFromString(buf[position:position+size])
    print("key "+kv.key)
    print("value "+kv.value)
    
    kvs = keyValue_pb2.keyValueStream()
    kvp = kvs.record.add()
    kvp.key = "key1"
    kvp.value = "value1"
    
    kvp = kvs.record.add()
    kvp.key = "key2"
    kvp.value = "value2"
    
    # 2) Sending the keyValue stream back over the socket
    outBuf = kvs.SerializeToString()
    print "Sending back to java- outbuf-len: " + str(len(outBuf))
    buf = encoder._VarintBytes(len(outBuf))
    inSock.send(buf)
    inSock.send(outBuf)
    
print "Got -1, Finishing python process"
import metric_pb2

from google.protobuf.internal.encoder import _VarintBytes
from google.protobuf.internal.decoder import _DecodeVarint32

with open('out.bin', 'wb') as f:
    for i in range(128):
        my_metric = metric_pb2.Metric()
        my_metric.name = 'sys.cpu'
        my_metric.type = 'gauge'
        my_metric.value = i
        my_metric.tags = str(i) + 'tag'
        size = my_metric.ByteSize()
        f.write(_VarintBytes(size))
        f.write(my_metric.SerializeToString())

with open('out.bin', 'rb') as f:
    buf = f.read()
    n = 0
    while n < len(buf):
        msg_len, new_pos = _DecodeVarint32(buf, n)
        n = new_pos
        msg_buf = buf[n:n + msg_len]
        n += msg_len
        read_metric = metric_pb2.Metric()
        read_metric.ParseFromString(msg_buf)
        # do something with read_metric
        print(read_metric)
Example #43
    def readBlock(self, length, pool_id, block_id, generation_stamp, offset,
                  check_crc):
        '''Send a read request to given block. If we receive a successful response,
        we start reading packets.

        Send read request:
        +---------------------------------------------------------------------+
        |  Data Transfer Protocol Version, 2 bytes                            |
        +---------------------------------------------------------------------+
        |  Op code, 1 byte (READ_BLOCK = 81)                                  |
        +---------------------------------------------------------------------+
        |  Delimited serialized OpReadBlockProto (varint len + request)       |
        +---------------------------------------------------------------------+

        Receive response:
        +---------------------------------------------------------------------+
        |  Delimited BlockOpResponseProto (varint len + response)             |
        +---------------------------------------------------------------------+

        Start reading packets. Each packet has the following structure:
        +---------------------------------------------------------------------+
        |  Packet length (4 bytes/32 bit int)                                 |
        +---------------------------------------------------------------------+
        |  Serialized size of header, 2 bytes                                 |
        +---------------------------------------------------------------------+
        |  Packet Header Proto                                                |
        +---------------------------------------------------------------------+
        |  x checksums, 4 bytes each                                          |
        +---------------------------------------------------------------------+
        |  x chunks of payload data                                           |
        +---------------------------------------------------------------------+

        '''
        log.debug("%s sending readBlock request" % self)

        # Send version and opcode
        self.sock.send(struct.pack('>h', 28))
        self.sock.send(struct.pack('b', self.READ_BLOCK))
        length = length - offset

        # Create and send OpReadBlockProto message
        request = datatransfer_proto.OpReadBlockProto()
        request.offset = offset
        request.len = length
        header = request.header
        header.clientName = "snakebite"
        base_header = header.baseHeader
        block = base_header.block
        block.poolId = pool_id
        block.blockId = block_id
        block.generationStamp = generation_stamp
        s_request = request.SerializeToString()
        log_protobuf_message("OpReadBlockProto:", request)
        delimited_request = encoder._VarintBytes(len(s_request)) + s_request
        self.sock.send(delimited_request)

        byte_stream = RpcBufferedReader(self.sock)
        block_op_response_bytes = get_delimited_message_bytes(byte_stream)

        block_op_response = datatransfer_proto.BlockOpResponseProto()
        block_op_response.ParseFromString(block_op_response_bytes)
        log_protobuf_message("BlockOpResponseProto", block_op_response)

        checksum_type = block_op_response.readOpChecksumInfo.checksum.type
        bytes_per_chunk = block_op_response.readOpChecksumInfo.checksum.bytesPerChecksum
        log.debug("Checksum type: %s, bytesPerChecksum: %s" %
                  (checksum_type, bytes_per_chunk))
        if checksum_type in [self.CHECKSUM_CRC32C, self.CHECKSUM_CRC32]:
            checksum_len = 4
        else:
            raise Exception("Checksum type %s not implemented" % checksum_type)

        total_read = 0
        if block_op_response.status == 0:  # datatransfer_proto.Status.Value('SUCCESS')
            while total_read < length:
                log.debug("== Reading next packet")

                packet_len = struct.unpack("!I", byte_stream.read(4))[0]
                log.debug("Packet length: %s", packet_len)

                serialized_size = struct.unpack("!H", byte_stream.read(2))[0]
                log.debug("Serialized size: %s", serialized_size)

                packet_header_bytes = byte_stream.read(serialized_size)
                packet_header = datatransfer_proto.PacketHeaderProto()
                packet_header.ParseFromString(packet_header_bytes)
                log_protobuf_message("PacketHeaderProto", packet_header)

                data_len = packet_header.dataLen

                chunks_per_packet = int(
                    (data_len + bytes_per_chunk - 1) / bytes_per_chunk)
                log.debug("Nr of chunks: %d", chunks_per_packet)

                data_len = packet_len - 4 - chunks_per_packet * checksum_len
                log.debug("Payload len: %d", data_len)

                byte_stream.reset()

                # Collect checksums
                if check_crc:
                    checksums = []
                    for _ in xrange(0, chunks_per_packet):
                        checksum = self._read_bytes(checksum_len)
                        checksum = struct.unpack("!I", checksum)[0]
                        checksums.append(checksum)
                else:
                    self._read_bytes(checksum_len * chunks_per_packet)

                # We use a fixed size buffer (a "load") to read only a couple of chunks at once.
                bytes_per_load = self.LOAD_SIZE - (self.LOAD_SIZE %
                                                   bytes_per_chunk)
                chunks_per_load = int(bytes_per_load / bytes_per_chunk)
                loads_per_packet = int(
                    math.ceil(bytes_per_chunk * chunks_per_packet /
                              bytes_per_load))

                read_on_packet = 0
                for i in range(loads_per_packet):
                    load = ''
                    for j in range(chunks_per_load):
                        log.debug("Reading chunk %s in load %s:", j, i)
                        bytes_to_read = min(bytes_per_chunk,
                                            data_len - read_on_packet)
                        chunk = self._read_bytes(bytes_to_read)
                        if check_crc:
                            checksum_index = i * chunks_per_load + j
                            if checksum_index < len(checksums) and crc(
                                    chunk) != checksums[checksum_index]:
                                raise Exception("Checksum doesn't match")
                        load += chunk
                        total_read += len(chunk)
                        read_on_packet += len(chunk)
                    yield load

            # Send ClientReadStatusProto message confirming successful read
            request = datatransfer_proto.ClientReadStatusProto()
            request.status = 0  # SUCCESS
            s_request = request.SerializeToString()
            log_protobuf_message("ClientReadStatusProto:", request)
            delimited_request = encoder._VarintBytes(
                len(s_request)) + s_request
            self.sock.send(delimited_request)
            self._close_socket()
 def serialize_for_stream(self, message):
     protobuf_message = ProtobufFormatter.serialize(message)
     delimiter = encoder._VarintBytes(len(protobuf_message))
     return delimiter + protobuf_message
Example #45
def login_req2buf(name, password):
    # Randomly generate a 16-character AES key for the login packet
    login_aes_key = bytes(''.join(random.sample(string.ascii_letters + string.digits, 16)), encoding="utf8")

    # Build protobuf message 1
    accountRequest = mm_pb2.ManualAuthAccountRequest(
        aes=mm_pb2.ManualAuthAccountRequest.AesKey(
            len=16,
            key=login_aes_key
        ),
        ecdh=mm_pb2.ManualAuthAccountRequest.Ecdh(
            nid=713,
            ecdhKey=mm_pb2.ManualAuthAccountRequest.Ecdh.EcdhKey(
                len=len(Util.EcdhPubKey),
                key=Util.EcdhPubKey
            )
        ),
        userName=name,
        password1=Util.GetMd5(password),
        password2=Util.GetMd5(password)
    )
    # Build protobuf message 2
    deviceRequest = mm_pb2.ManualAuthDeviceRequest(
        login=mm_pb2.LoginInfo(
            aesKey=login_aes_key,
            uin=0,
            guid=define.__GUID__ + '\0',  # guid ends with '\0'
            clientVer=define.__CLIENT_VERSION__,
            androidVer=define.__ANDROID_VER__,
            unknown=1,
        ),
        tag2=mm_pb2.ManualAuthDeviceRequest._Tag2(),
        imei=define.__IMEI__,
        softInfoXml=define.__SOFTINFO__.format(define.__IMEI__, define.__ANDROID_ID__, define.__MANUFACTURER__+" "+define.__MODELNAME__, define.__MOBILE_WIFI_MAC_ADDRESS__,
                                               define.__CLIENT_SEQID_SIGN__, define.__AP_BSSID__, define.__MANUFACTURER__, "taurus", define.__MODELNAME__, define.__IMEI__),
        unknown5=0,
        clientSeqID=define.__CLIENT_SEQID__,
        clientSeqID_sign=define.__CLIENT_SEQID_SIGN__,
        loginDeviceName=define.__MANUFACTURER__+" "+define.__MODELNAME__,
        deviceInfoXml=define.__DEVICEINFO__.format(
            define.__MANUFACTURER__, define.__MODELNAME__),
        language=define.__LANGUAGE__,
        timeZone="8.00",
        unknown13=0,
        unknown14=0,
        deviceBrand=define.__MANUFACTURER__,
        deviceModel=define.__MODELNAME__+"armeabi-v7a",
        osType=define.__ANDROID_VER__,
        realCountry="cn",
        unknown22=2,  # Unknown
    )

    logger.debug("accountData protobuf数据:" + Util.b2hex(accountRequest.SerializeToString()))
    logger.debug("deviceData protobuf数据:" +  Util.b2hex(deviceRequest.SerializeToString()))

    # Encrypt
    reqAccount = Util.compress_and_rsa(accountRequest.SerializeToString())
    reqDevice = Util.compress_and_aes(deviceRequest.SerializeToString(), login_aes_key)

    logger.debug("加密后数据长度:reqAccount={},reqDevice={}".format(len(reqAccount), len(reqDevice[0])))
    logger.debug("加密后reqAccount数据:" + Util.b2hex(reqAccount))
    logger.debug("加密后reqDevice数据:" + Util.b2hex(reqDevice[0]))

    # Build the packet body
    subheader = b''
    subheader += struct.pack(">I", len(accountRequest.SerializeToString()))                         # accountData protobuf length
    subheader += struct.pack(">I", len(deviceRequest.SerializeToString()))                          # deviceData protobuf length
    subheader += struct.pack(">I", len(reqAccount))                                                 # accountData length after RSA encryption
    # The body has three parts: the sub-header, the encrypted account/password data, and the encrypted device info
    body = subheader + reqAccount + reqDevice[0]

    # Assemble the packet header
    header = bytearray(0)
    header += bytes([0])                                                                            # low 2 bits: 02 = body not compressed; high 6 bits: header length, filled in last
    header += bytes([((0x7 << 4) + 0xf)])                                                           # 0x7: RSA encryption  0xf: cookie length
    header += struct.pack(">I", define.__CLIENT_VERSION__)                                          # client version, network byte order
    header += bytes([0]*4)                                                                          # uin
    header += bytes([0]*15)                                                                         # cookie
    header += encoder._VarintBytes(701)                                                             # cgi type
    header += encoder._VarintBytes(len(body))                                                       # body length before compression
    header += encoder._VarintBytes(len(body))                                                       # body length after compression (the login packet body is not compressed)
    header += struct.pack(">B", define.__LOGIN_RSA_VER__)                                           # RSA key version
    header += b'\x01\x02'                                                                           # Unknown Param
    header[0] = (len(header) << 2) + 2                                                              # header length

    # Assemble the packet
    logger.debug('Packet body: ' + str(body))
    logger.debug('Packet header: ' + str(header))
    senddata = header + body

    return (senddata, login_aes_key)
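The comments in the header-packing code above describe a compact layout: the first header byte packs the header length into its high 6 bits and a compression flag into its low 2 bits, while the second byte packs the encryption algorithm into its high nibble and the cookie length into its low nibble. A minimal sketch of the reverse calculation, based only on those comments (the function name and return shape are illustrative, not part of the original project):

def parse_header_prefix(first_byte, second_byte):
    """Unpack the two packed bytes at the start of the header built above."""
    header_len = first_byte >> 2          # high 6 bits: header length in bytes
    compress_flag = first_byte & 0x03     # low 2 bits: 1 = compressed body, 2 = uncompressed
    enc_algo = second_byte >> 4           # high 4 bits: 0x5 = AES, 0x7 = RSA
    cookie_len = second_byte & 0x0f       # low 4 bits: cookie length (0xf = 15 bytes)
    return header_len, compress_flag, enc_algo, cookie_len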
def writeDelimitedTo(message, stream):
    message_str = message.SerializeToString()
    delimiter = encoder._VarintBytes(len(message_str))

    stream.write(delimiter + message_str)
Exemple #47
0
#!/bin/python3

import msg_pb2
import socket
import sys
from google.protobuf.internal import encoder

theip = sys.argv[1]

print(theip)
print(str(sys.argv))

msg = msg_pb2.PGPMessage()
msg.type = msg_pb2.PGPMessage.SG_ACK
msg.ack.ip = theip
msg.ack.port = 6789
msg.ack.uuid = "123e4567-e89b-12d3-a456-426655440000"
msg.ack.name = "username"


msgs = msg.SerializeToString()
msgl = encoder._VarintBytes(len(msgs))
msgf = msgl + msgs

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.sendto(msgf, (theip, 6789))

 def sendMsg(self, resp):
     #print "Sending info"  
     data = resp.SerializeToString()
     self.con.sendall(encoder._VarintBytes(len(data)) + data)
Exemple #49
0
 def proto_length_serializer(data: Any) -> bytes:
     f = io.BytesIO()
     f.write(_VarintBytes(data.ByteSize()))
     f.write(data.SerializeToString())
     f.seek(0)
     return f.read()
Exemple #50
0
 def encode_delimited(cls, x, typ):
     """ Encode a message or value with size information
         (for use in a delimited communication stream) """
     data = cls.encode(x, typ)
     delimiter = protobuf_encoder._VarintBytes(len(data))
     return delimiter + data
Exemple #51
0
def deliminate_msg(packetMessage):
    serializedMessage = packetMessage.SerializeToString()
    delimiter = encoder._VarintBytes(len(serializedMessage))
    return delimiter + serializedMessage
Exemple #52
0
 
''' Send to server in the order given above'''
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.settimeout(10)
sock.connect(("localhost", 8020))

sock.send("hrpc") # header
sock.send(struct.pack('B', 9)) # version
sock.send(struct.pack('B', 0x00)) # RPC service class
sock.send(struct.pack('B', 0x00)) # auth none


sock.sendall(struct.pack('!I', rpcipc_length) + 
encoder._VarintBytes(len(s_rpcrequestheader)) + 
s_rpcrequestheader + 
encoder._VarintBytes(len(s_context)) + 
s_context)


'''
The Hadoop RPC protocol looks like this when sending requests:

+------------------------------------------------------------------+
| Length of the next three parts (4 bytes/32 bit int)              |
+------------------------------------------------------------------+
| Delimited serialized RpcRequestHeaderProto (varint len + header) |
+------------------------------------------------------------------+
| Delimited serialized RequestHeaderProto (varint len + header)    |
+------------------------------------------------------------------+
'''
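The framing described above can be expressed as a small helper: each protobuf message gets its own varint length prefix, and a single 4-byte big-endian length covers all the delimited parts. This is a sketch based on that description only; the frame_rpc_parts name is an assumption, not part of the original script.

import struct
from google.protobuf.internal import encoder


def frame_rpc_parts(*messages):
    """Frame protobuf messages as: 4-byte total length + (varint len + payload) per message."""
    serialized = [m.SerializeToString() for m in messages]
    delimited = b''.join(encoder._VarintBytes(len(s)) + s for s in serialized)
    return struct.pack('!I', len(delimited)) + delimited

# Usage would mirror the sendall call above, e.g.
# sock.sendall(frame_rpc_parts(rpc_request_header, request_header, request))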
Exemple #53
0
 def write(self, message):
     serialized = message.SerializeToString()
     size = len(serialized)
     self._writer.write(_VarintBytes(size))
     self._writer.write(serialized)
     self._writer.flush()
Exemple #54
0
def send_request_on_socket(request, socket):
    serialized_request = request.SerializeToString()
    size = request.ByteSize()
    socket.sendall(_VarintBytes(size) + serialized_request)
Exemple #55
0
def login_req2buf(name,password):
    #Randomly generate a 16-character AES key for the login packet
    login_aes_key = bytes(''.join(random.sample(string.ascii_letters + string.digits, 16)), encoding = "utf8")

    #Build protobuf message 1: account request
    accountRequest = mm_pb2.ManualAuthAccountRequest(
        aes     = mm_pb2.ManualAuthAccountRequest.AesKey(
            len = 16,
            key = login_aes_key
        ),
        ecdh    = mm_pb2.ManualAuthAccountRequest.Ecdh(
            nid = 713,
            ecdhKey = mm_pb2.ManualAuthAccountRequest.Ecdh.EcdhKey(
                len = len(Util.EcdhPubKey),
                key = Util.EcdhPubKey
            )
        ),
        userName = name,
        password1 = Util.GetMd5(password),
        password2 = Util.GetMd5(password)
    )
    #Build protobuf message 2: device request
    deviceRequest = mm_pb2.ManualAuthDeviceRequest(
        login = mm_pb2.LoginInfo(
            aesKey = login_aes_key,
            uin = 0,
            guid = define.__GUID__ + '\0',          #guid is terminated with '\0'
            clientVer = define.__CLIENT_VERSION__,
            androidVer = define.__ANDROID_VER__,
            unknown = 1,
        ),
        tag2 = mm_pb2.ManualAuthDeviceRequest._Tag2(),
        imei = define.__IMEI__,
        softInfoXml = define.__SOFTINFO__.format(define.__IMEI__,define.__ANDROID_ID__, define.__MANUFACTURER__+" "+define.__MODELNAME__, define.__MOBILE_WIFI_MAC_ADDRESS__, define.__CLIENT_SEQID_SIGN__, define.__AP_BSSID__, define.__MANUFACTURER__,"taurus", define.__MODELNAME__, define.__IMEI__),
        unknown5 = 0,
        clientSeqID = define.__CLIENT_SEQID__,
        clientSeqID_sign = define.__CLIENT_SEQID_SIGN__,
        loginDeviceName = define.__MANUFACTURER__+" "+define.__MODELNAME__,
        deviceInfoXml = define.__DEVICEINFO__.format(define.__MANUFACTURER__, define.__MODELNAME__),
        language = define.__LANGUAGE__,
        timeZone = "8.00",
        unknown13 = 0,
        unknown14 = 0,
        deviceBrand = define.__MANUFACTURER__,
        deviceModel = define.__MODELNAME__+"armeabi-v7a",
        osType = define.__ANDROID_VER__,
        realCountry = "cn",
        unknown22 = 2,                      #Unknown
    )
    
    logger.debug("accountData protobuf数据:" + str(accountRequest.SerializeToString()))
    logger.debug("deviceData protobuf数据:" + str(deviceRequest.SerializeToString()))

    #加密
    reqAccount = Util.compress_and_rsa(accountRequest.SerializeToString())
    reqDevice  = Util.compress_and_aes(deviceRequest.SerializeToString(),login_aes_key)

    logger.debug("加密后数据长度:reqAccount={},reqDevice={}".format(len(reqAccount),len(reqDevice[0])))
    logger.debug("加密后reqAccount数据:" + str(reqAccount))
    logger.debug("加密后reqDevice数据:" + str(reqDevice[0]))

    #封包包体
    subheader = b''
    subheader += struct.pack(">I",len(accountRequest.SerializeToString()))          #accountData protobuf长度
    subheader += struct.pack(">I",len(deviceRequest.SerializeToString()))           #deviceData protobuf长度
    subheader += struct.pack(">I",len(reqAccount))                                  #accountData RSA加密后长度
    body   =  subheader + reqAccount + reqDevice[0]                                 #包体由头信息、账号密码加密后数据、硬件设备信息加密后数据3部分组成
    
    #Assemble the packet header
    header = bytearray(0)
    header += bytes([0])                                                            #low 2 bits: 02 = body not compressed; high 6 bits: header length, filled in last
    header += bytes([((0x7<<4) + 0xf)])                                             #0x7: RSA encryption  0xf: cookie length
    header += struct.pack(">I",define.__CLIENT_VERSION__)                           #client version, network byte order
    header += bytes([0]*4)                                                          #uin
    header += bytes([0]*15)                                                         #cookie
    header += encoder._VarintBytes(701)                                             #cgi type
    header += encoder._VarintBytes(len(body))                                       #body length before compression
    header += encoder._VarintBytes(len(body))                                       #body length after compression (the login packet body is not compressed)
    header += struct.pack(">B",define.__LOGIN_RSA_VER__)                            #RSA key version
    header += b'\x01\x02'                                                           #Unknown Param
    header[0] = (len(header)<<2) + 2                                                #header length

    #Assemble the packet
    logger.debug('Packet body: ' + str(body))
    logger.debug('Packet header: ' + str(header))
    senddata = header + body
    
    return (senddata,login_aes_key)
Exemple #56
0
 def encode_message_with_size(cls, message):
     """ Encode a protobuf message, prepended with its size """
     data = message.SerializeToString()
     size = protobuf_encoder._VarintBytes(len(data))
     return size + data