def testGetBuildStages(self):
     """Test the GetBuildStages functionality."""
     bbv2 = buildbucket_v2.BuildbucketV2()
     start_time = Timestamp()
     start_time.GetCurrentTime()
     step = step_pb2.Step(name='stage_name',
                          start_time=start_time,
                          status=2)
     build_with_steps = build_pb2.Build(steps=[step])
     get_build_fn = self.PatchObject(bbv2,
                                     'GetBuild',
                                     return_value=build_with_steps)
     get_build_status_fn = self.PatchObject(
         bbv2,
         'GetBuildStatus',
         return_value={'build_config': 'something-paladin'})
     expected_result = [{
         'name': 'stage_name',
         'start_time': datetime.fromtimestamp(start_time.seconds),
         'finish_time': None,
         'buildbucket_id': 1234,
         'status': constants.BUILDER_STATUS_INFLIGHT,
         'build_config': 'something-paladin',
     }]
     self.assertEqual(bbv2.GetBuildStages(1234), expected_result)
     get_build_fn.assert_called_once_with(1234, properties='steps')
     get_build_status_fn.assert_called_once_with(1234)
Example #2
def main():

    with open("localhost.cer", "rb") as certFile:
        cert = certFile.read()

    credentials = grpc.ssl_channel_credentials(cert)

    channel = grpc.secure_channel("localhost:5001", credentials)
    stub = MeterReaderService.MeterReadingServiceStub(channel)

    now = Timestamp()
    now.GetCurrentTime()

    request = MeterReader.ReadingPacketMessage(successful=Enums.SUCCESS)
    reading = MeterReader.MeterReadingMessage(customerId=1,
                                              readingValue=1,
                                              readingTime=now)
    request.readings.append(reading)

    result = stub.AddReading(request)

    if (result.success == Enums.SUCCESS):
        print("Success")
    else:
        print("Failure")
Example #3
def make_child(child_pb, child_desc):
    res = None
    is_repeated = (type(child_pb) is RepeatedCompositeContainer) or (
        type(child_pb) is RepeatedScalarContainer)
    # the child is a generic type
    if child_desc.message_type is None:
        if is_repeated:
            res = []
            for _ in range(random.randint(0, 20)):
                if type(child_pb) is RepeatedScalarContainer:
                    res.append(fill_variable(int))
                else:
                    res.append(fill_variable(type(child_pb)))
        else:
            res = fill_variable(type(child_pb))
    else:
        if type(child_pb) is Timestamp:
            res = Timestamp()
            res.GetCurrentTime()
            return res
        pb_elem = _sym_db._classes[child_desc.message_type]
        if is_repeated:
            res = []
            for _ in range(random.randint(0, 20)):
                res.append(
                    make_mock_pb(pb_elem(),
                                 target_desc=pb_elem.DESCRIPTOR,
                                 target_pb_elem=pb_elem))
        else:
            res = make_mock_pb(child_pb, target_desc=pb_elem.DESCRIPTOR)
    return res
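
The snippet above depends on a fill_variable helper that is not shown. A plausible stand-in, purely for illustration (the original project's version may differ), returns a random value for the requested scalar type:

import random
import string

def fill_variable(py_type):
    """Hypothetical helper matching the calls above: produce a random value
    of the requested scalar type. Not the original implementation."""
    if py_type is bool:
        return random.choice([True, False])
    if py_type is int:
        return random.randint(0, 1000)
    if py_type is float:
        return random.uniform(0, 1000)
    if py_type is str:
        return ''.join(random.choices(string.ascii_letters, k=8))
    if py_type is bytes:
        return bytes(random.getrandbits(8) for _ in range(8))
    return py_type()  # fall back to the type's default constructor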
Example #4
 def test_decode_time(self):
     any_message = Any()
     timestamp_message = Timestamp()
     timestamp_message.GetCurrentTime()
     any_message.Pack(timestamp_message)
     assert timestamp_message.ToDatetime() == Client._decode_any(
         any_message)
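
Client._decode_any is not reproduced here; the round trip being tested rests on the standard Any packing API. A minimal sketch of such a decoder, assuming the payload wraps a Timestamp (not the client's actual implementation):

from google.protobuf.any_pb2 import Any
from google.protobuf.timestamp_pb2 import Timestamp

def decode_any_timestamp(any_message: Any):
    """Unpack an Any into a datetime if it wraps a Timestamp, else return None."""
    if any_message.Is(Timestamp.DESCRIPTOR):
        ts = Timestamp()
        any_message.Unpack(ts)
        return ts.ToDatetime()
    return None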
Example #5
def main():
    print("Calling gRPC Service")

    # Need to download the certificate from the browser; every connection is forced to be secure by the protocol.
    with open("localhost.pem", "rb") as file:
        cert = file.read()

    credentials = grpc.ssl_channel_credentials(cert)

    channel = grpc.secure_channel("localhost:5001", credentials)
    stub = MeterReaderService.MeterReadingServiceStub(channel)

    request = MeterReader.ReadingPacket(successful=Enum.ReadingStatus.Success)

    now = Timestamp()
    now.GetCurrentTime()
    x = random.randint(1, 10000)
    reading = MeterReader.ReadingMessage(
        customerId=x, readingValue=1, readingTime=now)

    request.readings.append(reading)
    result = stub.AddReading(request)

    if (result.success == Enum.ReadingStatus.Success):
        print("Success")
    else:
        print("Failure")
Example #6
def finding_name(source_name):
    """Creates a new finding and returns it name."""
    from google.cloud import securitycenter
    from google.cloud.securitycenter_v1.proto.finding_pb2 import Finding
    from google.protobuf.timestamp_pb2 import Timestamp

    client = securitycenter.SecurityCenterClient()

    now_proto = Timestamp()
    now_proto.GetCurrentTime()

    finding = client.create_finding(
        source_name,
        "scfinding",
        {
            "state": Finding.ACTIVE,
            "category": "C1",
            "event_time": now_proto,
            "resource_name": "//cloudresourcemanager.googleapis.com/organizations/1234",
        },
    )
    client.create_finding(
        source_name,
        "untouched",
        {
            "state": Finding.ACTIVE,
            "category": "MEDIUM_RISK_ONE",
            "event_time": now_proto,
            "resource_name": "//cloudresourcemanager.googleapis.com/organizations/1234",
        },
    )

    return finding.name
Example #7
def build_grpc_response(request_id, response):
  if response[CDS_IS_SUCCESSFUL_KEY]:
    status = CommandExecutor_pb2.SUCCESS
  else:
    status = CommandExecutor_pb2.FAILURE

  response.pop(CDS_IS_SUCCESSFUL_KEY)
  logs = response.pop(RESULTS_LOG_KEY)

  errorMessage = ""
  if ERR_MSG_KEY in response:
    errorMessage = '\n'.join(response.pop(ERR_MSG_KEY))

  # Payload should only contain response data returned from the executed script and/or the error message
  payload = json.dumps(response)

  timestamp = Timestamp()
  timestamp.GetCurrentTime()

  execution_output = CommandExecutor_pb2.ExecutionOutput(requestId=request_id,
                                                         response=logs,
                                                         status=status,
                                                         payload=payload,
                                                         timestamp=timestamp,
                                                         errMsg=errorMessage)

  return truncate_execution_output(execution_output)
Example #8
def run(ip, port):
    timestamp = Timestamp()
    timestamp.GetCurrentTime()
    channel = grpc.insecure_channel('%s:%s'%(ip, port))
    stub = prediction_pb2_grpc.ProxyServerStub(channel)
    response = stub.downstream(
        prediction_pb2.request(
            input_=model_pb2.input(inputType='string', inputStream='hello'),
            src_uri="localhost",
            seq=1,
            req_id=1,
            timestamp=timestamp))
    print('Response\n{res}'.format(res=response.status))
Example #9
def update_finding_state(source_name):
    """Demonstrate updating only a finding state."""
    # [START update_finding_state]
    from google.cloud import securitycenter
    from google.cloud.securitycenter_v1.proto.finding_pb2 import Finding
    from google.protobuf.timestamp_pb2 import Timestamp

    # Create a client.
    client = securitycenter.SecurityCenterClient()
    # source_name is the resource path for a source that has been
    # created previously (you can use list_sources to find a specific one).
    # Its format is:
    # source_name = "organizations/{organization_id}/sources/{source_id}"
    # e.g.:
    # source_name = "organizations/111122222444/sources/1234"
    finding_name = "{}/findings/samplefindingid2".format(source_name)

    now_proto = Timestamp()
    now_proto.GetCurrentTime()

    # Call the API to change the finding state to inactive as of now.
    new_finding = client.set_finding_state(finding_name,
                                           Finding.INACTIVE,
                                           start_time=now_proto)
    print("New state: {}".format(Finding.State.Name(new_finding.state)))
Example #10
 def _ingest_request(self):
     """ Interate through the metrics and create an IngestRequest
     """
     self._update_service_info()
     request = IngestRequest(reporter=self._reporter)
     request.idempotency_key = self._generate_idempotency_key()
     start_time = Timestamp()
     start_time.GetCurrentTime()
     duration = Duration()
     duration.FromSeconds(self._intervals * self._flush_interval)
     for metric in self._runtime_metrics:
         metric_type = MetricKind.GAUGE
         if len(metric) == 3:
             key, value, metric_type = metric
         else:
             key, value = metric
         request.points.add(
             duration=duration,
             start=start_time,
             labels=self._labels,
             metric_name=key,
             double_value=value,
             kind=metric_type,
         )
     _log.debug("Metrics collected: %s", request)
     return request
Example #11
    def publish_metrics(self, data, event_name, onu_device_id):
        """

        :param data:  actual test result dict
        :param event_name: Test_result
        :param onu_device_id:  Onu device id
        :return: None
        """
        metric_data = MetricInformation(
            metadata=MetricMetaData(
                title=OmciTestRequest.OPTICAL_GROUP_NAME,
                ts=arrow.utcnow().float_timestamp,
                logical_device_id=self.logical_device_id,
                serial_no=self.serial_number,
                device_id=onu_device_id,
                uuid=self.uuid,
                context={'events': event_name}),
            metrics=data)
        self.log.info('Publish-Test-Result')
        raised_ts = Timestamp()
        raised_ts.GetCurrentTime()
        event_header = self.get_event_header(EventType.KPI_EVENT2,
                                             EventCategory.EQUIPMENT,
                                             EventSubCategory.ONU, "KPI_EVENT",
                                             raised_ts)
        kpi_event = KpiEvent2(type=KpiEventType.slice,
                              ts=arrow.utcnow().float_timestamp,
                              slice_data=[metric_data])
        event = Event(header=event_header, kpi_event2=kpi_event)
        self.core_proxy.submit_event(event)
Example #12
def scenario():
    # Creation of grpc channels and instances of act, check1 and estore stubs.
    factory = sf.connect(router_grpc="./configs/grpc.json",
                         rabbit_mq='./configs/rabbit.json',
                         router_mq='./configs/mq.json')

    # Storing EventID object of root Event.
    report_id = sf.create_event_id()

    # Storing grpc Timestamp of script start.
    report_start_timestamp = Timestamp()
    report_start_timestamp.GetCurrentTime()

    # Initialize chain_id for script
    ver1_chain = None
    ver2_chain = None
    scenario_id = 1
    # Sending request to estore. Creation of the root Event for all cases performed.
    sf.submit_event(
        estore=factory['estore'],
        event_batch=sf.create_event_batch(
            report_name=
            f"[TS_{scenario_id}]Aggressive IOC vs two orders: second order's price is lower than first",
            start_timestamp=report_start_timestamp,
            event_id=report_id))

    # Getting case participants from refdata
    trader1 = firms[0]['Traders'][0]['TraderName']
    trader1_firm = firms[0]['FirmName']
    trader1_fix = firms[0]['Traders'][0]['TraderConnection']
    trader2 = firms[1]['Traders'][0]['TraderName']
    trader2_firm = firms[1]['FirmName']
    trader2_fix = firms[1]['Traders'][0]['TraderConnection']

    case_id = 0
    # Execution of case for every instrument in refdata
    for instrument in instruments:
        case_id += 1
        ver1_chain, ver2_chain = aggressive_ioc_traded_against_two_orders_partially_and_then_cancelled(
            f"Case[TC_{scenario_id}.{case_id}]: "
            f"Trader {trader1} vs trader {trader2} for instrument {instrument['SecurityID']}",
            report_id, {
                'case_id': sf.create_event_id(),
                'Instrument': instrument['SecurityID'],
                'Order1Price': instrument['Price'],
                'Order1Qty': 30,
                'Order2Price': instrument['Price'] + 1,
                'Order2Qty': 10,
                'Order3Price': instrument['Price'] - 1,
                'Order3Qty': 100,
                'trader1': trader1,
                'trader1_firm': trader1_firm,
                'trader1_fix': trader1_fix,
                'trader2': trader2,
                'trader2_firm': trader2_firm,
                'trader2_fix': trader2_fix,
                'ver1_chain': ver1_chain,
                'ver2_chain': ver2_chain
            }, factory)
Example #13
    def SubmitUpdate(self, request, context):
        """
        handling client's submission of the federated updates
        running aggregation if there are enough updates
        """
        self.update_error = False
        token = self.validate_client(request.client, context)

        if token is None:
            response_comment = 'Ignored the submit from invalid client. '
            self.logger.info(response_comment)

        # if len(self.accumulator) > self.min_num_clients:
        #     context.abort(grpc.StatusCode.ALREADY_EXISTS,
        #                   'Contrib: already enough in the current round')
        else:

            model_meta = self.is_valid_contribution(request.client.meta)
            if model_meta is None:
                context.abort(grpc.StatusCode.FAILED_PRECONDITION,
                              'Contrib: invalid for the current round')
                response_comment = 'Invalid contribution. '
                self.logger.info(response_comment)
            else:

                client_contrib_id = '{}_{}_{}'.format(model_meta.task.name, token,
                                                      model_meta.current_round)

                start_time = request.client.meta.created
                timenow = Timestamp()
                timenow.GetCurrentTime()
                time_seconds = timenow.seconds - start_time.seconds
                self.logger.info(
                    'received %s (%s Bytes, %s seconds)', client_contrib_id,
                    request.ByteSize(), time_seconds or 'less than 1')

                if self.save_contribution(client_contrib_id, request):
                    with self.lock:
                        self.accumulator.append(request)
                        # if self.get_enough_updates():
                        #     self.aggregate()
                        num_of_updates = len(self.accumulator)

                    # Only the first update that meets the minimum number of clients triggers the aggregation.
                    if num_of_updates == self.min_num_clients:
                        if num_of_updates < len(self.auth_client_id):
                            self.logger.debug("Starting to wait. {}".format(self.wait_after_min_clients))
                            time.sleep(self.wait_after_min_clients)
                        self.aggregate()

                response_comment = \
                    'Received round {} from {} ({} Bytes, {} seconds)'.format(
                        request.client.meta.current_round, request.client.uid,
                        request.ByteSize(), time_seconds or 'less than 1')

        summary_info = fed_msg.FederatedSummary(comment=response_comment)
        if self.model_meta_info is not None:
            summary_info.meta.CopyFrom(self.model_meta_info)
        return summary_info
Example #14
def build_grpc_blueprint_upload_response(request_id, subrequest_id, success=True, payload=[]):
  timestamp = Timestamp()
  timestamp.GetCurrentTime()
  return CommandExecutor_pb2.UploadBlueprintOutput(requestId=request_id,
    subRequestId=subrequest_id,
    status=CommandExecutor_pb2.SUCCESS if success else CommandExecutor_pb2.FAILURE,
    timestamp=timestamp,
    payload=json.dumps(payload))
Example #15
 def _model_to_proto(self, token_model: TokenModel):
     date_created = Timestamp()
     date_created.GetCurrentTime()
     date_created.seconds = int(token_model.date_created.timestamp())
     return Token(id=token_model.id,
                  uid=token_model.uid,
                  name=token_model.name,
                  date_created=date_created)
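
One caveat with the snippet above: GetCurrentTime() fills in both seconds and nanos, so overwriting only seconds leaves the current time's nanos behind. If the intent is to encode the stored creation time exactly, FromDatetime() sets both fields together; a minimal sketch, assuming the stored value is a Python datetime:

from datetime import datetime
from google.protobuf.timestamp_pb2 import Timestamp

def datetime_to_proto(dt: datetime) -> Timestamp:
    """Convert a Python datetime into a Timestamp without stale nanos."""
    ts = Timestamp()
    ts.FromDatetime(dt)  # sets both seconds and nanos from the datetime
    return ts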
Example #16
def current_timestamp():
    """Get current timestamp.

    :return: Current timestamp.
    """
    timestamp = Timestamp()
    timestamp.GetCurrentTime()
    return timestamp
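
This pattern recurs throughout the examples on this page; for reference, a Timestamp populated with GetCurrentTime() converts to and from other representations with the well-known-type helpers. A small sketch using only standard protobuf APIs:

from google.protobuf.timestamp_pb2 import Timestamp

ts = Timestamp()
ts.GetCurrentTime()              # set to the current UTC time in place (returns None)

as_datetime = ts.ToDatetime()    # naive datetime in UTC
as_json = ts.ToJsonString()      # RFC 3339 string, e.g. "2024-01-01T12:00:00Z"
as_millis = ts.ToMilliseconds()  # integer milliseconds since the epoch

ts.FromDatetime(as_datetime)     # round-trip back from the datetime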
Example #17
def generate_objects():
    timestamp = Timestamp()
    timestamp.GetCurrentTime()
    header = Header(frame_id="some_frame", stamp=timestamp)
    detectedObjects = [generate_object(i) for i in range(2)]
    return DetectedObjects(
        header=header,
        detectedObjects=detectedObjects,
    )
Example #18
def build_grpc_blueprint_validation_response(request_id, subrequest_id,
    cba_uuid, success=True):
  timestamp = Timestamp()
  timestamp.GetCurrentTime()
  return CommandExecutor_pb2.BlueprintValidationOutput(requestId=request_id,
                                                subRequestId=subrequest_id,
                                                status=CommandExecutor_pb2.SUCCESS if success else CommandExecutor_pb2.FAILURE,
                                                cbaUUID=cba_uuid,
                                                timestamp=timestamp)
Example #19
def current_timestamp():
    """Get current timestamp

    Returns: current timestamp

    """
    timestamp = Timestamp()
    timestamp.GetCurrentTime()
    return timestamp
Example #20
 def _get_keyboard_event(event_type: KeyboardEventType,
                         key_name: str) -> Event:
     ts = Timestamp()
     ts.GetCurrentTime()
     event_type = KeyboardEvent.KEYDOWN \
         if event_type == KeyboardEventType.KeyDown \
         else KeyboardEvent.KEYUP
     keyboard_event = KeyboardEvent(ts=ts, key=key_name, type=event_type)
     return Event(keyboard=keyboard_event)
Example #21
 def testGetBuildStatusWithValidId(self):
     """Tests for GetBuildStatus with a valid ID."""
     properties_dict = {
         'cidb_id': '1234',
         'bot_id': 'swarm-cros-34',
         'cbb_branch': 'master',
         'cbb_config': 'sludge-paladin-tryjob',
         'cbb_master_build_id': '4321',
         'platform_version': '11721.0.0',
         'milestone_version': '74',
         'full_version': 'R74-11721.0.0-b3457724',
         'critical': '1',
         'build_type': 'Try',
     }
     start_time = Timestamp()
     start_time.GetCurrentTime()
     fake_properties = Struct(
         fields={
             key: Value(string_value=value)
             for key, value in properties_dict.items()
         })
     fake_output = build_pb2.Build.Output(properties=fake_properties)
     fake_build = build_pb2.Build(id=1234,
                                  start_time=start_time,
                                  status=2,
                                  output=fake_output)
     self.PatchObject(buildbucket_v2.BuildbucketV2,
                      'GetBuild',
                      return_value=fake_build)
     expected_valid_status = {
         'build_config': 'sludge-paladin-tryjob',
         'start_time': datetime.fromtimestamp(start_time.seconds),
         'finish_time': None,
         'id': 1234,
         'status': constants.BUILDER_STATUS_INFLIGHT,
         'chrome_version': None,
         'platform_version': '11721.0.0',
         'milestone_version': '74',
         'full_version': 'R74-11721.0.0-b3457724',
         'important': 1,
         'buildbucket_id': 1234,
         'summary': None,
         'master_build_id': 4321,
         'bot_hostname': 'swarm-cros-34',
         'builder_name': None,
         'build_number': None,
         'buildbot_generation': None,
         'waterfall': None,
         'deadline': None,
         'build_type': 'Try',
         'metadata_url': None,
         'toolchain_url': None,
         'branch': 'master'
     }
     bbv2 = buildbucket_v2.BuildbucketV2()
     status = bbv2.GetBuildStatus(1234)
     self.assertEqual(status, expected_valid_status)
Example #22
def build_response(request, results, is_success=True):
    if is_success:
        status = CommandExecutor_pb2.SUCCESS
    else:
        status = CommandExecutor_pb2.FAILURE

    timestamp = Timestamp()
    timestamp.GetCurrentTime()
    return CommandExecutor_pb2.ExecutionOutput(requestId=request.requestId, response=results, status=status,
                                               timestamp=timestamp)
Example #23
def ack_response(input: ExecutionServiceInput):
    timestamp = Timestamp()
    timestamp.GetCurrentTime()
    response_common_header = input.commonHeader
    status = Status()
    status.timestamp = current_time()
    status.eventType = EVENT_COMPONENT_PROCESSING
    return ExecutionServiceOutput(commonHeader=response_common_header,
                                  actionIdentifiers=input.actionIdentifiers,
                                  status=status)
Example #24
def purge_aims_new_address_subscription():
    timestamp = Timestamp()
    timestamp.GetCurrentTime()
    try:
        # Try purging via the seek method
        # Seeking to now should ack any messages published before this moment
        subscriber.seek(aims_subscription_path, time=timestamp)
    except MethodNotImplemented:
        # Seek is not implemented by the pubsub-emulator
        ack_all_on_aims_new_address_subscription()
Example #25
        def on_request_ended(errors):
            start_nanos = self.trace.start_time.ToNanoseconds()
            now = Timestamp()
            now.GetCurrentTime()
            self.trace.duration_ns = now.ToNanoseconds() - start_nanos
            self.trace.end_time.GetCurrentTime()

            op_name = self.operation_name or ''
            self.trace.root.MergeFrom(self.nodes.get(''))
            yield self.add_trace(op_name, self.document.document_ast,
                                 self.query_string, self.trace)
Example #26
def get_datetime_proto():
    """Returns current date time proto in UTC.

    Returns
    -------
    datetime_proto: google.protobuf.timestamp_pb2.Timestamp
        Current date time proto object in UTC.
    """
    timestamp = Timestamp()
    timestamp.GetCurrentTime()
    return timestamp
Example #27
def success_response(input: ExecutionServiceInput, property_json: json, code: int):
    timestamp = Timestamp()
    timestamp.GetCurrentTime()
    status = Status()
    status.timestamp = current_time()
    status.eventType = EVENT_COMPONENT_EXECUTED
    status.code = code
    status.message = 'success'
    payload_struct = create_response_payload_from_json(input.actionIdentifiers.actionName, property_json)
    return ExecutionServiceOutput(commonHeader=input.commonHeader,
                                  actionIdentifiers=input.actionIdentifiers, status=status, payload=payload_struct)
Example #28
class OpenCensusMetricsExporter(MetricsExporter):
    """OpenCensus metrics exporter.

    Args:
        endpoint: OpenCensus Collector receiver endpoint.
        service_name: Name of Collector service.
        host_name: Host name.
        client: MetricsService client stub.
    """
    def __init__(
        self,
        endpoint: str = DEFAULT_ENDPOINT,
        service_name: str = None,
        host_name: str = None,
        client: metrics_service_pb2_grpc.MetricsServiceStub = None,
    ):
        self.endpoint = endpoint
        if client is None:
            channel = grpc.insecure_channel(self.endpoint)
            self.client = metrics_service_pb2_grpc.MetricsServiceStub(
                channel=channel)
        else:
            self.client = client

        self.node = utils.get_node(service_name, host_name)
        self.exporter_start_timestamp = Timestamp()
        self.exporter_start_timestamp.GetCurrentTime()

    def export(self,
               metric_records: Sequence[MetricRecord]) -> MetricsExportResult:
        try:
            responses = self.client.Export(
                self.generate_metrics_requests(metric_records))

            # Read response
            for _ in responses:
                pass

        except grpc.RpcError:
            return MetricsExportResult.FAILURE

        return MetricsExportResult.SUCCESS

    def shutdown(self) -> None:
        pass

    def generate_metrics_requests(
        self, metrics: Sequence[MetricRecord]
    ) -> metrics_service_pb2.ExportMetricsServiceRequest:
        collector_metrics = translate_to_collector(
            metrics, self.exporter_start_timestamp)
        service_request = metrics_service_pb2.ExportMetricsServiceRequest(
            node=self.node, metrics=collector_metrics)
        yield service_request
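
A hedged usage sketch for the exporter above; the endpoint is the conventional OpenCensus collector port and, like the names, is an assumption rather than something taken from this example:

# Hypothetical wiring; only the constructor arguments shown above are used.
exporter = OpenCensusMetricsExporter(
    endpoint="localhost:55678",   # assumed collector address
    service_name="my-service",
    host_name="my-host",
)
# A metrics controller/pipeline would then push batches of MetricRecord
# objects into exporter.export(...).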
Example #29
 def testTimestampToDatetime(self):
     # Test None input.
     self.assertIsNone(utils.TimestampToDatetime(None))
     # Test empty input.
     time1 = Timestamp()
     self.assertIsNone(utils.TimestampToDatetime(time1))
     # Test valid input.
     time1.GetCurrentTime()
     formatted_time = utils.TimestampToDatetime(time1)
     self.assertIsNotNone(formatted_time)
     self.assertTrue(isinstance(formatted_time, datetime))
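
utils.TimestampToDatetime itself is not reproduced here; a minimal sketch of the behaviour the test asserts (None for a missing or unset Timestamp, otherwise a datetime), not chromite's actual implementation:

from datetime import datetime

def timestamp_to_datetime(ts):
    """Hypothetical helper: None for a missing or unset Timestamp, else a datetime."""
    if ts is None or (ts.seconds == 0 and ts.nanos == 0):
        return None
    return datetime.fromtimestamp(ts.seconds)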
Example #30
    def test_message_in_message(self):
        """
        Test to verify we can send different message types over same Kafka topic and re-construct at reciever. To do
        this we have a simple tunnel message that maps message type to an identifier and takes the pre serialised
        message and sends as a raw byte string.

        The test has three message types which it picks at random, encodes and sends via Kafka and we then check the
        rx'ed version is same as was sent.
        """
        pc = ProtoCopy()
        pc.register(native_object_type=Message1, proto_buf_type=PBMessage1)
        pc.register(native_object_type=Message2, proto_buf_type=PBMessage2)
        pc.register(native_object_type=Message3, proto_buf_type=PBMessage3)

        # Create an instance of each message type.
        message1 = Message1(field=Gibberish.more_gibber(),
                            state=State.S2,
                            tasks=[Task(task_name="Task-1", task_id=1), Task(task_name="Task-2", task_id=2)])
        message2 = Message2(field_X=Gibberish.more_gibber(),
                            m2=3142,
                            state=State.S1,
                            tasks=[Task(task_name="Task-3", task_id=3),
                                   Task(task_name="Task-4", task_id=4),
                                   Task(task_name="Task-5", task_id=5)])
        message3 = Message3(field_Y=Gibberish.more_gibber(),
                            m3=6284,
                            state=State.S3,
                            tasks=[Task(task_name="Task-6", task_id=6)])

        msg_map = {0: message1, 1: message2, 2: message3}

        timestamp = Timestamp()
        for x in range(0, 25):
            mtype = np.random.randint(3, size=1)[0]
            msg_2_send = msg_map[mtype]

            # Create tunnel TX message
            tunnel_tx = PBNotification()
            tunnel_tx._type = mtype
            tunnel_tx._payload = pc.serialize(msg_2_send)
            # GetCurrentTime() updates the Timestamp in place and returns None, so it
            # cannot be assigned directly; copy the value in (assumes my_field is a Timestamp field).
            timestamp.GetCurrentTime()
            tunnel_tx.my_field.CopyFrom(timestamp)
            serialized_tunnel_message = tunnel_tx.SerializeToString()

            # Create tunnel RX message
            tunnel_rx = PBNotification()
            tunnel_rx.ParseFromString(serialized_tunnel_message)

            # Reconstruct Original message after tunnel
            expected = msg_map[tunnel_rx._type]
            actual = pc.deserialize(tunnel_rx._payload, target_type=type(expected))
            self.assertEqual(expected, actual)

        return