Example #1
    def test_cbsd_not_unregistered_when_coordinates_change_less_than_10_m(
            self):
        builder = CbsdAPIDataBuilder() \
            .with_single_step_enabled(True) \
            .with_capabilities() \
            .with_frequency_preferences() \
            .with_desired_state() \
            .with_serial_number(self.serial_number) \
            .with_full_installation_param() \
            .with_cbsd_category("a")

        self.given_single_step_cbsd_provisioned(builder)
        with self.while_cbsd_is_active():
            filters = get_filters_for_request_type('deregistration',
                                                   self.serial_number)

            update_request = EnodebdUpdateCbsdRequest(
                serial_number=self.serial_number,
                installation_param=InstallationParam(
                    antenna_gain=DoubleValue(value=15),
                    latitude_deg=DoubleValue(value=10.500001),
                    longitude_deg=DoubleValue(value=11.5000001),
                    indoor_deployment=BoolValue(value=True),
                    height_type=StringValue(value="agl"),
                    height_m=DoubleValue(value=12.5),
                ),
                cbsd_category="a",
            )

            self.when_cbsd_is_updated_by_enodebd(update_request)
            cbsd = self.when_cbsd_is_fetched(self.serial_number)
            self.then_cbsd_is(cbsd, builder.payload)
            self.then_message_is_never_sent(filters)
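A note on the coordinates used in this update: relative to the 10.5 / 11.5 baseline set in given_single_step_cbsd_provisioned (see Example #8), latitude moves by 1e-6 degrees and longitude by 1e-7 degrees. One degree of latitude is roughly 111 km, so both shifts are far below the 10 m threshold the test name refers to. A back-of-the-envelope check, purely as an illustration and not part of the test suite:

import math

METERS_PER_DEGREE = 111_320  # approximate length of one degree of latitude

lat_shift_m = 1e-6 * METERS_PER_DEGREE                                 # ~0.11 m
lon_shift_m = 1e-7 * METERS_PER_DEGREE * math.cos(math.radians(10.5))  # ~0.01 m
assert max(lat_shift_m, lon_shift_m) < 10  # well under the deregistration threshold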
Example #2
 def set_compliancy_left(self, compliancy) -> None:
     """Set left arm stiff or compliant."""
     request = JointsCommand(commands=[
         JointCommand(id=name, compliant=BoolValue(value=compliancy))
         for name in self.left_arm
     ])
     self.stub.SendJointsCommands(request)
Example #3
def build_enodebd_update_cbsd_request(
    serial_number: str,
    latitude_deg: str,
    longitude_deg: str,
    indoor_deployment: str,
    antenna_height: str,
    antenna_height_type: str,
    cbsd_category: str,
) -> EnodebdUpdateCbsdRequest:
    # cbsd category and antenna height type should be converted to lowercase
    # for the gRPC call
    antenna_height_type = antenna_height_type.lower()
    cbsd_category = cbsd_category.lower()
    # latitude and longitude come from the TR-181 data model; the device config stores
    # them as strings representing degrees, so they are converted to floats here
    latitude_deg_float = float(latitude_deg)
    longitude_deg_float = float(longitude_deg)

    indoor_deployment_bool = _indoortobool(indoor_deployment)
    antenna_height_float = float(antenna_height)

    installation_param = InstallationParam(
        latitude_deg=DoubleValue(value=latitude_deg_float),
        longitude_deg=DoubleValue(value=longitude_deg_float),
        indoor_deployment=BoolValue(value=indoor_deployment_bool),
        height_m=DoubleValue(value=antenna_height_float),
        height_type=StringValue(value=antenna_height_type),
    )

    return EnodebdUpdateCbsdRequest(
        serial_number=serial_number,
        installation_param=installation_param,
        cbsd_category=cbsd_category,
    )
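_indoortobool is referenced above but not shown in the snippet. A minimal sketch of what such a string-to-bool helper could look like (an assumption about its behavior, not the project's actual implementation):

def _indoortobool(value: str) -> bool:
    # Hypothetical helper: treat common truthy spellings from TR-069/TR-181 parameters as True
    return value.strip().lower() in ("1", "true", "yes")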
Example #4
    def build_config(self, use_defaults=False):
        # SLICING SPEC
        slicing_specs = [tfma.SlicingSpec()]
        if self.slices:
            slicing_specs.extend([tfma.SlicingSpec(feature_keys=e)
                                  for e in self.slices])

        # MODEL SPEC
        metric_labels = sorted(list(set(self.metrics.keys())))
        model_specs = [tfma.ModelSpec(signature_name='zen_eval',
                                      label_keys=self.output_mapping)]

        # METRIC SPEC
        baseline = [tfma.MetricConfig(class_name='ExampleCount')]
        metrics_specs = []
        for i, key in enumerate(metric_labels):
            metrics = baseline.copy()
            metrics.extend([tfma.MetricConfig(class_name=to_camel_case(m))
                            for m in self.metrics[key]])

            metrics_specs.append(tfma.MetricsSpec(
                output_names=[key],
                metrics=metrics))

        return tfma.EvalConfig(
            model_specs=model_specs,
            slicing_specs=slicing_specs,
            metrics_specs=metrics_specs,
            options=tfma.Options(
                include_default_metrics=BoolValue(value=use_defaults)))
Example #5
 async def Check(self, stream: Stream[Request, Reply]) -> None:
     request = await stream.recv_message()
     assert request is not None
     number_is_prime = await self._loop.run_in_executor(
         self._executor, is_prime, request.number)
     reply = Reply(is_prime=BoolValue(value=number_is_prime))
     await stream.send_message(reply)
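is_prime is referenced above (run in an executor so the event loop stays responsive) but not defined in the snippet. A simple trial-division version that would fit the call (a sketch, not necessarily the original helper):

def is_prime(n: int) -> bool:
    # Hypothetical trial-division primality test, cheap enough to run in a thread/process pool
    if n < 2:
        return False
    for candidate in range(2, int(n ** 0.5) + 1):
        if n % candidate == 0:
            return False
    return True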
Example #6
def test_tracer_end_traced_span_do_post(m_thread, tracer):
    tracer.memory.current_span_id = 'test_span_id'
    tracer._post_spans_to_stackdriver_api = True
    tracer.stackdriver_trace_client = MagicMock()
    tracer.stackdriver_trace_client.span_path.return_value = 'test_span_name'
    tracer._delete_current_span = MagicMock()

    tracer.end_traced_span(exclude_from_posting=False)

    tracer.logger.debug.assert_called_with("Closing span test_span_id")
    tracer.stackdriver_trace_client.span_path.assert_called_with(
        'test_project_name', 'test_trace_id', 'test_span_id')

    expected_span_info = {
        'name': 'test_span_name',
        'span_id': 'test_span_id',
        'display_name': 'test_truncated_str',
        'start_time': 'test_start_time',
        'end_time': 'test_timestamp',
        'parent_span_id': 'test_parent_span_id',
        'same_process_as_parent_span': BoolValue(value=False),
        'child_span_count': Int32Value(value=0)
    }
    m_thread.assert_called_with(target=post_span,
                                args=(
                                    tracer.stackdriver_trace_client,
                                    expected_span_info,
                                ))
    assert tracer._delete_current_span.called
Example #7
    def build_config(self):
        # SLICING SPEC
        slicing_specs = [tfma.SlicingSpec()]
        if self.slices:
            slicing_specs.extend(
                [tfma.SlicingSpec(feature_keys=e) for e in self.slices])

        # MODEL SPEC
        model_specs = [
            tfma.ModelSpec(label_key=self.label_key,
                           prediction_key=self.prediction_key)
        ]

        # METRIC SPEC
        baseline = [tfma.MetricConfig(class_name='ExampleCount')]
        for key in self.metrics:
            baseline.append(tfma.MetricConfig(class_name=to_camel_case(key)))

        metrics_specs = [tfma.MetricsSpec(metrics=baseline)]

        return tfma.EvalConfig(
            model_specs=model_specs,
            slicing_specs=slicing_specs,
            metrics_specs=metrics_specs,
            options=tfma.Options(include_default_metrics=BoolValue(
                value=False)))
Example #8
    def given_single_step_cbsd_provisioned(self,
                                           builder: CbsdAPIDataBuilder) -> int:
        self.when_cbsd_is_created(builder.payload)
        update_request = EnodebdUpdateCbsdRequest(
            serial_number=self.serial_number,
            installation_param=InstallationParam(
                antenna_gain=DoubleValue(value=15),
                latitude_deg=DoubleValue(value=10.5),
                longitude_deg=DoubleValue(value=11.5),
                indoor_deployment=BoolValue(value=True),
                height_type=StringValue(value="agl"),
                height_m=DoubleValue(value=12.5),
            ),
            cbsd_category="a",
        )
        self.when_cbsd_is_updated_by_enodebd(update_request)
        cbsd = self.when_cbsd_is_fetched(builder.payload["serial_number"])
        self.then_cbsd_is(
            cbsd,
            builder.with_state(UNREGISTERED).with_is_active(False).
            with_full_installation_param().with_cbsd_category("a").payload,
        )

        state = self.when_cbsd_asks_for_state()
        self.then_state_is(state, get_empty_state())

        cbsd = self._check_cbsd_successfully_provisioned(builder)

        return cbsd['id']
Example #9
 def set_ir_colormap_async(self, colormap, min_temp, max_temp, auto_scale,
                           **kwargs):
     """Async version of set_ir_colormap()"""
     scale = compositor_pb2.IrColorMap.ScalingPair(min=min_temp,
                                                   max=max_temp)
     auto = BoolValue(value=auto_scale)
     ir_colormap = compositor_pb2.IrColorMap(colormap=colormap,
                                             scale=scale,
                                             auto_scale=auto)
     request = compositor_pb2.SetIrColormapRequest(map=ir_colormap)
     return self.call_async(self._stub.SetIrColormap, request,
                            self._return_response,
                            self._compositor_error_from_response, **kwargs)
Example #10
 def build_enodebd_update_request(
         self,
         indoor_deployment=False,
         cbsd_category="a") -> EnodebdUpdateCbsdRequest:
     return EnodebdUpdateCbsdRequest(
         serial_number=self.payload["serial_number"],
         installation_param=InstallationParam(
             latitude_deg=DoubleValue(value=10.5),
             longitude_deg=DoubleValue(value=11.5),
             indoor_deployment=BoolValue(value=indoor_deployment),
             height_type=StringValue(value="agl"),
             height_m=DoubleValue(value=12.5),
         ),
         cbsd_category=cbsd_category,
     )
Example #11
    def end_traced_span(self, exclude_from_posting=False):
        """
        End a span and collect details about the span, then post it to the API.

        Arguments:
            exclude_from_posting (bool): exclude this particular trace from being posted
        """
        self.logger.debug(f'Closing span {self.memory.current_span_id}')

        if self._post_spans_to_stackdriver_api and not exclude_from_posting:
            span_values = self.current_span['values']

            end_timestamp = get_timestamp()
            if self._post_spans_to_stackdriver_api:
                name = self.stackdriver_trace_client.span_path(
                    self.project_name, span_values[B3_TRACE_ID],
                    span_values[B3_SPAN_ID])
            else:
                name = f'{self.project_name}/{span_values[B3_TRACE_ID]}/{span_values[B3_SPAN_ID]}'

            span_info = {
                'name': name,
                'span_id': span_values[B3_SPAN_ID],
                'display_name': truncate_str(self.current_span['display_name'],
                                             limit=SPAN_DISPLAY_NAME_BYTE_LIMIT),
                'start_time': self.current_span['start_timestamp'],
                'end_time': end_timestamp,
                'parent_span_id': span_values[B3_PARENT_SPAN_ID],
                'same_process_as_parent_span': BoolValue(value=False),
                'child_span_count': Int32Value(value=self.current_span['child_span_count']),
            }
            post_to_api_job = Thread(target=post_span,
                                     args=(self.stackdriver_trace_client,
                                           span_info))
            post_to_api_job.start()

        self._delete_current_span()
Example #12
def optimize_anchoring(opt_info, client):
    """
    Sends an RPC to the robot which optimizes the anchoring and links it to the position of the
    fiducial in the blueprint.
    :param opt_info: info needed for the optimization.
    :param client: the map processing client.
    :return: the response to the process_anchoring rpc.
    """
    initial_hint = map_processing_pb2.AnchoringHint()
    object_hint = initial_hint.world_objects.add()
    object_hint.object_anchor.id = str(opt_info.fiducial_id)
    object_hint.object_anchor.seed_tform_object.CopyFrom(
        opt_info.get_fiducial_origin())
    return client.process_anchoring(
        params=map_processing_pb2.ProcessAnchoringRequest.Params(
            optimize_existing_anchoring=BoolValue(value=False)),
        modify_anchoring_on_server=False,
        stream_intermediate_results=False,
        initial_hint=initial_hint)
Example #13
    def set_ir_colormap(self, colormap, min_temp, max_temp, auto_scale,
                        **kwargs):
        """Set IR colormap to use on Spot CAM

        Args:
            colormap (bosdyn.api.spot_cam.compositor_pb2.IrColorMap.ColorMap): IR display colormap
            min_temp (Float): minimum temperature on the temperature scale
            max_temp (Float): maximum temperature on the temperature scale
            auto_scale (Boolean): Auto-scale the color map. This is the most human-understandable
                option. min_temp and max_temp are ignored if this is set to True
            kwargs: extra arguments for controlling RPC details.
        """
        scale = compositor_pb2.IrColorMap.ScalingPair(min=min_temp,
                                                      max=max_temp)
        auto = BoolValue(value=auto_scale)
        ir_colormap = compositor_pb2.IrColorMap(colormap=colormap,
                                                scale=scale,
                                                auto_scale=auto)
        request = compositor_pb2.SetIrColormapRequest(map=ir_colormap)
        return self.call(self._stub.SetIrColormap, request,
                         self._return_response,
                         self._compositor_error_from_response, **kwargs)
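A hedged usage sketch for the method above; the compositor_client variable and the COLORMAP_JET enum name are assumptions, not taken from the snippet:

# Hypothetical call; with auto_scale=True the min_temp/max_temp values are ignored
compositor_client.set_ir_colormap(
    colormap=compositor_pb2.IrColorMap.COLORMAP_JET,  # assumed enum value name
    min_temp=0.0,
    max_temp=100.0,
    auto_scale=True,
)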
Example #14
    def test_enodebd_update_cbsd(self) -> None:
        builder = CbsdAPIDataBuilder() \
            .with_capabilities() \
            .with_frequency_preferences() \
            .with_desired_state() \
            .with_antenna_gain(15) \
            .with_serial_number(self.serial_number) \
            .with_cbsd_category("b")
        self.when_cbsd_is_created(builder.payload)
        cbsd = self.when_cbsd_is_fetched(self.serial_number)
        self.then_cbsd_is(
            cbsd,
            builder.with_indoor_deployment(False).with_is_active(
                False).with_state(UNREGISTERED).payload,
        )

        req = EnodebdUpdateCbsdRequest(
            serial_number=self.serial_number,
            cbsd_category="a",
            installation_param=InstallationParam(
                latitude_deg=DoubleValue(value=10.5),
                longitude_deg=DoubleValue(value=11.5),
                height_m=DoubleValue(value=12.5),
                height_type=StringValue(value="agl"),
                indoor_deployment=BoolValue(value=True),
                antenna_gain=DoubleValue(value=15),
            ),
        )
        self.when_cbsd_is_updated_by_enodebd(req)
        self.then_logs_are({"serial_number": self.serial_number},
                           ["EnodebdUpdateCbsd"])
        cbsd = self.when_cbsd_is_fetched(self.serial_number)
        self.then_cbsd_is(
            cbsd,
            builder.with_cbsd_category(
                "a").with_full_installation_param().payload)
Example #15
def translate_to_trace_proto(span_data):
    """Translates the opencensus spans to ocagent proto spans.

    :type span_data: :class:`~opencensus.trace.span_data.SpanData`
    :param span_data: SpanData tuples to convert to protobuf spans

    :rtype: :class:`~opencensus.proto.trace.Span`
    :returns: Protobuf format span.
    """

    if not span_data:
        return None

    pb_span = trace_pb2.Span(
        name=trace_pb2.TruncatableString(value=span_data.name),
        kind=span_data.span_kind,
        trace_id=hex_str_to_bytes_str(span_data.context.trace_id),
        span_id=hex_str_to_bytes_str(span_data.span_id),
        parent_span_id=hex_str_to_bytes_str(span_data.parent_span_id)
        if span_data.parent_span_id is not None else None,
        start_time=proto_ts_from_datetime_str(span_data.start_time),
        end_time=proto_ts_from_datetime_str(span_data.end_time),
        status=trace_pb2.Status(code=span_data.status.code,
                                message=span_data.status.message)
        if span_data.status is not None else None,
        same_process_as_parent_span=BoolValue(
            value=span_data.same_process_as_parent_span)
        if span_data.same_process_as_parent_span is not None else None,
        child_span_count=UInt32Value(value=span_data.child_span_count)
        if span_data.child_span_count is not None else None)

    # attributes
    if span_data.attributes is not None:
        for attribute_key, attribute_value \
                in span_data.attributes.items():
            add_proto_attribute_value(pb_span.attributes, attribute_key,
                                      attribute_value)

    # time events
    if span_data.time_events is not None:
        for span_data_event in span_data.time_events:
            if span_data_event.message_event is not None:
                pb_event = pb_span.time_events.time_event.add()
                pb_event.time.FromJsonString(span_data_event.timestamp)
                set_proto_message_event(pb_event.message_event,
                                        span_data_event.message_event)
            elif span_data_event.annotation is not None:
                pb_event = pb_span.time_events.time_event.add()
                pb_event.time.FromJsonString(span_data_event.timestamp)
                set_proto_annotation(pb_event.annotation,
                                     span_data_event.annotation)

    # links
    if span_data.links is not None:
        for link in span_data.links:
            pb_link = pb_span.links.link.add(
                trace_id=hex_str_to_bytes_str(link.trace_id),
                span_id=hex_str_to_bytes_str(link.span_id),
                type=link.type)

            if link.attributes is not None and \
                    link.attributes.attributes is not None:
                for attribute_key, attribute_value \
                        in link.attributes.attributes.items():
                    add_proto_attribute_value(pb_link.attributes,
                                              attribute_key, attribute_value)

    # tracestate
    if span_data.context.tracestate is not None:
        for (key, value) in span_data.context.tracestate.items():
            pb_span.tracestate.entries.add(key=key, value=value)

    return pb_span
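The "value if x is not None else None" pattern above works because wrapper types such as BoolValue and UInt32Value are message fields: passing None simply leaves the field unset, so a consumer can distinguish "absent" from an explicit False or 0. A small illustration of that presence semantics, reusing the trace_pb2 and BoolValue already imported by the exporter module (standalone sketch, not part of the exporter):

# Explicitly-set wrapper vs. field left unset
explicit = trace_pb2.Span(same_process_as_parent_span=BoolValue(value=False))
absent = trace_pb2.Span()  # equivalent to passing None: the field stays unset
assert explicit.HasField("same_process_as_parent_span")
assert not absent.HasField("same_process_as_parent_span")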
Example #16
    def testGetTrainingRunDetails(self):
        client = Mock()

        options = [
            ['data_split_column', 'column_name_1'],
            ['data_split_eval_fraction', 0.3],
            ['data_split_method', 1],
            ['distance_type', 1],
            ['early_stop', BoolValue(value=False)],
            ['initial_learn_rate', 0.1],
            ['input_label_columns', ['column1', 'column2']],
            ['kmeans_initialization_column', 'column_name_2'],
            ['kmeans_initialization_method', 1],
            ['l1_regularization', DoubleValue(value=0.5)],
            ['l2_regularization', DoubleValue(value=0.6)],
            ['label_class_weights', [0.2, 0.3]],
            ['learn_rate', 0.7],
            ['learn_rate_strategy', 1],
            ['loss_type', 1],
            ['max_iterations', 20],
            ['min_relative_progress', DoubleValue(value=0.4)],
            ['model_uri', 'model.uri.string'],
            ['num_clusters', 7],
            ['optimization_strategy', 1],
            ['warm_start', BoolValue(value=True)],
        ]

        mocked_options = []
        for option in options:
            option_mock = Mock()
            option_mock.name = option[0]
            mocked_options.append([option_mock, option[1]])

        training_options = Mock()
        training_options.ListFields = Mock(return_value=mocked_options)

        training_run = Mock(training_options=training_options,
                            results=['result', 'result', 'result'])

        model = Mock(training_runs=[training_run])

        client.get_model = Mock(return_value=model)
        bigquery = BigQueryService(client)

        expected = {
            'details': {
                'actual_iterations': 3,
                'data_split_column': 'column_name_1',
                'data_split_eval_fraction': 0.3,
                'data_split_method': Model.DataSplitMethod(1).name,
                'distance_type': Model.DistanceType(1).name,
                'early_stop': 'False',
                'initial_learn_rate': 0.1,
                'input_label_columns': "['column1', 'column2']",
                'kmeans_initialization_column': 'column_name_2',
                'kmeans_initialization_method':
                    Model.KmeansEnums.KmeansInitializationMethod(1).name,
                'l1_regularization': 0.5,
                'l2_regularization': 0.6,
                'label_class_weights': '[0.2, 0.3]',
                'learn_rate': 0.7,
                'learn_rate_strategy': Model.LearnRateStrategy(1).name,
                'loss_type': Model.LossType(1).name,
                'max_iterations': 20,
                'min_relative_progress': 0.4,
                'model_uri': 'model.uri.string',
                'num_clusters': 7,
                'optimization_strategy': Model.OptimizationStrategy(1).name,
                'warm_start': 'True',
            },
        }

        result = bigquery.get_training_run_details('some_model_id', 0)
        self.assertEqual(expected, result)
Example #17
 def Probe(self, request, context):
     return csi_pb2.ProbeResponse(ready=BoolValue(value=True))
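Because ProbeResponse.ready is a BoolValue wrapper rather than a plain bool, a caller can tell an explicit ready=False apart from "readiness not reported". A hypothetical client-side check (the identity_stub and ProbeRequest names are assumptions; only csi_pb2.ProbeResponse appears in the snippet):

# Hypothetical caller-side presence check on the wrapped field
response = identity_stub.Probe(csi_pb2.ProbeRequest())
if response.HasField("ready"):
    print("plugin ready:", response.ready.value)
else:
    print("plugin did not report readiness")  # wrapper field left unset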
Example #18
    def __init__(self):  # pylint: disable=R0912,R0915
        """
        Builds a new config_pb2.AgentConfig instance whenever AgentConfig() is created.
        If 'HT_CONFIG_FILE' is set in the environment, configuration is loaded from that file;
        otherwise it falls back to 'DEFAULT_AGENT_CONFIG' defined in 'default.py'.
        """

        config_dict = DEFAULT_AGENT_CONFIG
        custom_config = {}
        file_dict = _read_from_file()
        if file_dict is not None:
            config_dict = merge_config(config_dict, file_dict)

        env_dict = load_config_from_env()
        config_dict = merge_config(config_dict, env_dict)

        self.custom_config = _apply_custom_config_options(
            custom_config, config_dict)

        # Transform legacy values to keep backward compatibility, e.g. 'zipkin' vs. the proto enum ZIPKIN
        _transform_values(config_dict)
        logger.info("Config init complete - config state: %s", config_dict)

        # Create protobuf Reporting object
        reporting = jf.Parse(jf.MessageToJson(config_pb2.Reporting()),
                             config_pb2.Reporting)
        reporting.endpoint = config_dict['reporting']['endpoint']
        reporting.secure = config_dict['reporting']['secure']
        reporting.token = config_dict['reporting']['token']

        # Set trace_reporter_type
        if config_dict['reporting']['trace_reporter_type'] == 'OTLP':
            reporting.trace_reporter_type = config_pb2.TraceReporterType.OTLP
        elif config_dict['reporting']['trace_reporter_type'] == 'ZIPKIN':
            reporting.trace_reporter_type = config_pb2.TraceReporterType.ZIPKIN
        else:
            # Fall back to OTLP
            reporting.trace_reporter_type = config_pb2.TraceReporterType.OTLP

        # Create DataCapture Message components
        rpc_body = config_pb2.Message(
            request=BoolValue(
                value=config_dict['data_capture']['rpc_body']['request']),
            response=BoolValue(
                value=config_dict['data_capture']['rpc_body']['response']))
        rpc_metadata = config_pb2.Message(
            request=BoolValue(
                value=config_dict['data_capture']['rpc_metadata']['request']),
            response=BoolValue(
                value=config_dict['data_capture']['rpc_metadata']['response']))
        http_body = config_pb2.Message(
            request=BoolValue(
                value=config_dict['data_capture']['http_body']['request']),
            response=BoolValue(
                value=config_dict['data_capture']['http_body']['response']))
        http_headers = config_pb2.Message(
            request=BoolValue(
                value=config_dict['data_capture']['http_headers']['request']),
            response=BoolValue(
                value=config_dict['data_capture']['http_headers']['response']))

        # Create Protobuf DataCapture object
        data_capture = jf.Parse(jf.MessageToJson(config_pb2.DataCapture()),
                                config_pb2.DataCapture)
        data_capture.http_headers = http_headers
        data_capture.http_body = http_body
        data_capture.rpc_metadata = rpc_metadata
        data_capture.rpc_body = rpc_body
        data_capture.body_max_size_bytes = config_dict['data_capture'][
            'body_max_size_bytes']

        # Create Protobuf AgentConfig object
        self.agent_config: config_pb2.AgentConfig = jf.Parse(
            jf.MessageToJson(config_pb2.AgentConfig()), config_pb2.AgentConfig)
        self.agent_config.service_name = config_dict['service_name']
        self.agent_config.reporting = reporting
        self.agent_config.data_capture = data_capture
        tmp_propagation_formats = []
        if 'TRACECONTEXT' in config_dict['propagation_formats']:
            tmp_propagation_formats.append(
                config_pb2.PropagationFormat.TRACECONTEXT)
            tmp_propagation_formats = list(set(tmp_propagation_formats))
        if 'B3' in config_dict['propagation_formats']:
            tmp_propagation_formats.append(config_pb2.PropagationFormat.B3)
            tmp_propagation_formats = list(set(tmp_propagation_formats))
        if not tmp_propagation_formats:
            # Default to TRACECONTEXT
            tmp_propagation_formats.append(
                config_pb2.PropagationFormat.TRACECONTEXT)
        self.agent_config.propagation_formats = tmp_propagation_formats
        self.agent_config.enabled = config_dict['enabled']

        self.agent_config.resource_attributes = config_dict[
            'resource_attributes']
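A hedged sketch of how the wrapped data-capture flags built above might be consulted later; the attribute path mirrors the objects assembled in this snippet rather than a documented public API:

# Hypothetical downstream read of one of the BoolValue flags set above
cfg = AgentConfig()
if cfg.agent_config.data_capture.http_headers.request.value:
    pass  # request-header capture is enabled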