Example #1
0
class HubMotor:
    """Interface to one VESC-driven hub motor on a shared CAN bus.

    Incoming VESC status frames addressed to ``can_node_id`` are parsed
    into a MotorControllerState protobuf; when status message 5 arrives
    (the last of the periodic status set) the accumulated state is
    published on the event bus. Outgoing helpers send RPM, current, and
    brake-current commands as extended CAN frames.
    """

    def __init__(
        self,
        name,
        wheel_radius,
        gear_ratio,
        poll_pairs,
        can_node_id,
        can_socket,
    ):
        """Set up state and subscribe to CAN frames.

        Args:
            name: base name; state events are published on '<name>/state'.
            wheel_radius: wheel radius (meters, per send_velocity_command).
            gear_ratio: motor-to-wheel gear reduction factor.
            poll_pairs: motor pole pairs — electrical revs per mechanical
                rev (original spelling "poll" kept for compatibility).
            can_node_id: CAN node id of the VESC this instance talks to.
            can_socket: CAN socket exposing add_reader() and send().
        """
        self.name = name
        self.can_node_id = can_node_id
        self.can_socket = can_socket
        self.wheel_radius = wheel_radius
        self.gear_ratio = gear_ratio
        self.poll_pairs = poll_pairs
        # Commanded current is clamped to +/- this many amps.
        self.max_current = 20
        self._event_bus = get_event_bus(self.name)
        self._latest_state = motor_pb2.MotorControllerState()
        self._latest_stamp = Timestamp()
        # Reader receives every frame on the bus; the handler filters by node id.
        self.can_socket.add_reader(self._handle_can_message)
        # Timing of status message 5 arrivals, used to estimate update rate.
        self._last_tachometer_stamp = None
        self._delta_time_seconds = 0.0
        # Exponential moving average of the status-message period (seconds).
        self._average_delta_time = 0.0

    def _handle_can_message(self, cob_id, data, stamp):
        """Parse one CAN frame for this node into the latest state.

        Args:
            cob_id: CAN object id; low byte is the node id, next byte the
                VESC command/status number.
            data: raw payload passed to the per-command parser.
            stamp: receive-time Timestamp, copied into _latest_stamp.
        """
        can_node_id = (cob_id & 0xff)
        # Ignore frames addressed to other nodes on the shared bus.
        if can_node_id != self.can_node_id:
            return
        command = (cob_id >> 8) & 0xff
        parser = g_vesc_msg_parsers.get(command, None)
        if parser is None:
            logger.warning(
                'No parser for command :%x node id: %x',
                command,
                can_node_id,
            )
            return
        logger.debug('can node id %02x', can_node_id)
        parser(self._latest_state, data)
        self._latest_stamp.CopyFrom(stamp)

        if command == VESC_STATUS_MSG_5:
            if self._last_tachometer_stamp is not None:
                self._delta_time_seconds = (
                    self._latest_stamp.ToMicroseconds() -
                    self._last_tachometer_stamp.ToMicroseconds()) * 1e-6
                # 0.9/0.1 EMA smooths jitter in the message period.
                self._average_delta_time = self._average_delta_time * (
                    0.9) + self._delta_time_seconds * 0.1
            else:
                # First message 5 seen: allocate the stamp to diff against.
                self._last_tachometer_stamp = Timestamp()

            self._last_tachometer_stamp.CopyFrom(self._latest_stamp)

            # only log on the 5th vesc message, as we have complete state at that point.
            event = make_event('%s/state' % self.name,
                               self._latest_state,
                               stamp=self._latest_stamp)
            self._event_bus.send(event)

    def _send_can_command(self, command, data):
        """Send `data` to this node, encoding `command` in the cob id's second byte."""
        cob_id = int(self.can_node_id) | (command << 8)
        # print('send %x'%cob_id, '%x'%socket.CAN_EFF_FLAG)
        # socket.CAN_EFF_FLAG for some reason on raspberry pi this is
        # the wrong value (-0x80000000 )
        eff_flag = 0x80000000
        self.can_socket.send(cob_id, data, flags=eff_flag)

    def _tach_to_rads(self, er):
        '''Compute mechanical wheel radians from electrical revolutions.'''
        rotations = er / (self.poll_pairs * self.gear_ratio)
        return rotations * 2 * math.pi

    def average_update_rate(self):
        """Return the smoothed status-message rate in Hz (floored to avoid /0)."""
        return 1.0 / max(self._average_delta_time, 1e-6)

    def tachometer_rads(self):
        """Return accumulated wheel position in radians."""
        return self._tach_to_rads(self._latest_state.tachometer.value)

    def velocity_rads(self):
        # rpm field holds electrical RPM (mirrors send_rpm_command's
        # conversion); divide by 60 for revolutions per second.
        return self._tach_to_rads(self._latest_state.rpm.value) / 60.0

    def send_rpm_command(self, rpm):
        """Command a mechanical RPM (converted to electrical RPM for the VESC)."""
        RPM_FORMAT = '>i'  # big endian, int32
        erpm = rpm * self.poll_pairs * self.gear_ratio
        self._latest_state.commanded_rpm.value = int(erpm)
        # RPM and brake-current commands are mutually exclusive.
        self._latest_state.ClearField('commanded_brake_current')
        data = struct.pack(RPM_FORMAT, int(erpm))
        self._send_can_command(VESC_SET_RPM, data)

    def send_velocity_command(self, velocity_m_s):
        """Command a linear wheel-surface velocity in meters per second."""
        rpm = 60 * velocity_m_s / (self.wheel_radius * 2 * math.pi)
        self.send_rpm_command(rpm)

    def send_velocity_command_rads(self, rads_second):
        """Command an angular wheel velocity in radians per second."""
        rpm = 60 * rads_second / (2 * math.pi)
        self.send_rpm_command(rpm)

    def send_current_command(self, current_amps):
        """Command motor current in amps, clamped to +/- max_current.

        The wire format is milliamps, hence the factor of 1000.
        """
        CURRENT_FORMAT = '>i'  # big endian, int32
        data = struct.pack(
            CURRENT_FORMAT,
            int(
                1000 * max(
                    min(current_amps, self.max_current),
                    -self.max_current,
                ), ),
        )
        self._send_can_command(VESC_SET_CURRENT, data)

    def send_current_brake_command(self, current_amps):
        """Command braking current in amps, clipped to [0, max_current]."""
        CURRENT_BRAKE_FORMAT = '>i'  # big endian, int32

        # Brake-current and RPM commands are mutually exclusive.
        self._latest_state.ClearField('commanded_rpm')
        self._latest_state.commanded_brake_current.value = current_amps

        data = struct.pack(
            CURRENT_BRAKE_FORMAT,
            int(1000 * np.clip(current_amps, 0, self.max_current), ),
        )
        self._send_can_command(VESC_SET_CURRENT_BRAKE, data)

    def get_state(self):
        """Return the live MotorControllerState protobuf (not a copy)."""
        return self._latest_state
Example #2
0
def _FormatTimestamp(timestamp: timestamp_pb2.Timestamp) -> str:
    dt = timestamp.ToDatetime()
    return dt.strftime("%Y-%m-%d %H:%M:%S")
Example #3
0
def _detect_schema_and_feature(entity, owner, id_column, feature_columns,
                               timestamp_column, timestamp_value,
                               serving_store, warehouse_store, df):
    """Create schema and feature specs for an import spec.

    Args:
        entity (str): entity name.
        owner (str): owner assigned to each created feature.
        id_column (str): column name of the entity id; if None, a column
            named after the entity is used instead.
        feature_columns (list of str): columns to extract as features; if
            None, every column except the entity id and timestamp columns
            is used.
        timestamp_column (str): column name of the timestamp; if None, a
            single timestamp value is applied to all rows.
        timestamp_value (datetime.datetime): timestamp to apply to all
            rows in the dataset when timestamp_column is None; if also
            None, the current time is used.
        serving_store (feast.sdk.resources.feature.DataStore): serving
            store to write the features in this instance to.
        warehouse_store (feast.sdk.resources.feature.DataStore): warehouse
            store to write the features in this instance to.
        df (pandas.DataFrame): dataframe holding the data.

    Returns:
        feast.specs.ImportSpec_pb2.Schema: schema of the data.
        dict of str: feast.specs.FeatureSpec_pb2.FeatureSpec: features in
            the data, keyed by feature id.

    Raises:
        ValueError: if the entity column or a named feature column is
            not present in df.
    """

    schema = Schema()
    # Entity id column: an explicit id_column wins; otherwise fall back to
    # a dataframe column named after the entity itself.
    if id_column is not None:
        schema.entityIdColumn = id_column
    elif entity in df.columns:
        schema.entityIdColumn = entity
    else:
        raise ValueError("Column with name {} is not found".format(entity))

    # Timestamp: either taken per-row from a column, or a single value
    # (caller-provided or "now") applied to the whole dataset.
    if timestamp_column is not None:
        schema.timestampColumn = timestamp_column
    else:
        if timestamp_value is None:
            ts = Timestamp()
            ts.GetCurrentTime()
        else:
            ts = Timestamp(
                seconds=int((timestamp_value -
                             datetime.datetime(1970, 1, 1)).total_seconds()))
        schema.timestampValue.CopyFrom(ts)

    features = {}
    if feature_columns is not None:
        # check if all column exist and create feature accordingly
        for column in feature_columns:
            if column not in df.columns:
                raise ValueError(
                    "Column with name {} is not found".format(column))
            features[column] = _create_feature(df[column], entity, owner,
                                               serving_store, warehouse_store)
    else:
        # get all column except entity id and timestampColumn
        feature_columns = list(df.columns.values)
        _remove_safely(feature_columns, schema.entityIdColumn)
        _remove_safely(feature_columns, schema.timestampColumn)
        for column in feature_columns:
            features[column] = _create_feature(df[column], entity, owner,
                                               serving_store, warehouse_store)

    # Mirror every dataframe column into the schema, attaching a feature id
    # where a feature was created for that column.
    for col in df.columns:
        field = schema.fields.add()
        field.name = col
        if col in features:
            field.featureId = features[col].id

    # Re-key features by their generated feature id.
    features_dict = {}
    for k in features:
        features_dict[features[k].id] = features[k]

    return schema, features_dict
Example #4
0
def encode_attribute_event_time(dt: datetime.datetime) -> str:
    """Encode a datetime as a protobuf JSON (RFC 3339) timestamp string."""
    proto_ts = Timestamp()
    proto_ts.FromDatetime(dt)
    return proto_ts.ToJsonString()
    def test_translate_to_collector(self):
        """translate_to_collector maps one ExportRecord to one OC Metric.

        Verifies the metric descriptor (name/description/unit/type/label
        keys), resource pass-through (non-string label values are
        stringified), and the single timeseries/point produced from a
        checkpointed SumAggregator.
        """
        test_metric = self._meter.create_counter("testcollector", "testdesc",
                                                 "unit", int,
                                                 self._labels.keys())
        # Checkpoint a sum of 123 so the exported point has a known value.
        aggregator = aggregate.SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = ExportRecord(
            test_metric,
            self._key_labels,
            aggregator,
            metrics.get_meter_provider().resource,
        )
        start_timestamp = Timestamp()
        output_metrics = metrics_exporter.translate_to_collector(
            [record],
            start_timestamp,
        )
        self.assertEqual(len(output_metrics), 1)
        self.assertIsInstance(output_metrics[0], metrics_pb2.Metric)
        self.assertEqual(output_metrics[0].metric_descriptor.name,
                         "testcollector")
        self.assertEqual(output_metrics[0].metric_descriptor.description,
                         "testdesc")
        self.assertEqual(output_metrics[0].metric_descriptor.unit, "unit")
        self.assertEqual(
            output_metrics[0].metric_descriptor.type,
            metrics_pb2.MetricDescriptor.CUMULATIVE_INT64,
        )
        self.assertEqual(len(output_metrics[0].metric_descriptor.label_keys),
                         2)
        self.assertEqual(
            output_metrics[0].metric_descriptor.label_keys[0].key,
            "environment",
        )
        self.assertEqual(
            output_metrics[0].metric_descriptor.label_keys[1].key,
            "number",
        )

        # Resource labels are carried over; int and bool values must be
        # converted to strings by the translation.
        self.assertIsNotNone(output_metrics[0].resource)
        self.assertEqual(
            output_metrics[0].resource.type,
            "",
        )
        self.assertEqual(
            output_metrics[0].resource.labels["key_with_str_value"],
            self._resource_labels["key_with_str_value"],
        )
        self.assertIsInstance(
            output_metrics[0].resource.labels["key_with_int_val"],
            str,
        )
        self.assertEqual(
            output_metrics[0].resource.labels["key_with_int_val"],
            str(self._resource_labels["key_with_int_val"]),
        )
        self.assertIsInstance(
            output_metrics[0].resource.labels["key_with_true"],
            str,
        )
        self.assertEqual(
            output_metrics[0].resource.labels["key_with_true"],
            str(self._resource_labels["key_with_true"]),
        )

        # One timeseries carrying the supplied start time, the label
        # values, and the single checkpointed point.
        self.assertEqual(len(output_metrics[0].timeseries), 1)
        self.assertEqual(len(output_metrics[0].timeseries[0].label_values), 2)
        self.assertEqual(output_metrics[0].timeseries[0].start_timestamp,
                         start_timestamp)
        self.assertEqual(
            output_metrics[0].timeseries[0].label_values[0].has_value, True)
        self.assertEqual(output_metrics[0].timeseries[0].label_values[0].value,
                         "staging")
        self.assertEqual(len(output_metrics[0].timeseries[0].points), 1)
        # The point timestamp is the aggregator's last-update time in ns,
        # split into whole seconds and remaining nanoseconds.
        self.assertEqual(
            output_metrics[0].timeseries[0].points[0].timestamp.seconds,
            record.aggregator.last_update_timestamp // 1000000000,
        )
        self.assertEqual(
            output_metrics[0].timeseries[0].points[0].timestamp.nanos,
            record.aggregator.last_update_timestamp % 1000000000,
        )
        self.assertEqual(output_metrics[0].timeseries[0].points[0].int64_value,
                         123)
Example #6
0
 def _proto_timestamp_to_pandas_time(
     timestamp: timestamp_pb2.Timestamp, ) -> pd.Timestamp:
     """Convert a protobuf Timestamp into a UTC-normalized pandas Timestamp."""
     return TimeSeriesApi._convert_utc(pd.Timestamp(timestamp.ToJsonString()))
Example #7
0
def test_write_read():
    """Round-trip test: write message, protobuf, and POD series to a bddf
    file, then read them back with both DataReader and StreamDataReader.
    """
    file_annotations = {'robot': 'spot', 'individual': 'spot-BD-99990001'}
    channel_annotations = {'ccc': '3', 'd': '4444'}
    filename = os.path.join(tempfile.gettempdir(), 'test1.bdf')
    series1_type = 'bosdyn/test/1'
    series1_spec = {'channel': 'channel_a'}
    series1_content_type = 'text/plain'
    series1_additional_indexes = ['idxa', 'idxb']
    timestamp_nsec = now_nsec()
    msg_data = b'This is some data'
    operator_comment = OperatorComment(message="End of test",
                                       timestamp=now_timestamp())
    pod_series_type = 'bosdyn/test/pod'
    pod_spec = {'varname': 'test_var'}

    # Test writing the file.
    with open(filename, 'wb') as outfile, \
         DataWriter(outfile, annotations=file_annotations) as data_writer:
        # Write generic message data to the file.
        series1_index = data_writer.add_message_series(
            series1_type,
            series1_spec,
            series1_content_type,
            'test_type',
            annotations=channel_annotations,
            additional_index_names=series1_additional_indexes)
        data_writer.write_data(series1_index, timestamp_nsec, msg_data, [1, 2])

        # Write a protobuf to the file.
        proto_writer = ProtoSeriesWriter(data_writer, OperatorComment)
        proto_writer.write(timestamp_to_nsec(operator_comment.timestamp),
                           operator_comment)

        # Write POD data (floats) to the file.
        pod_writer = PodSeriesWriter(data_writer,
                                     pod_series_type,
                                     pod_spec,
                                     bddf.TYPE_FLOAT32,
                                     annotations={'units': 'm/s^2'})
        for val in range(10, 20):
            pod_writer.write(timestamp_nsec, val)

    # Test reading the file.
    with open(filename, 'rb') as infile, DataReader(infile) as data_reader:
        # Check the file version number.
        assert data_reader.version.major_version == 1
        assert data_reader.version.minor_version == 0
        assert data_reader.version.patch_level == 0
        assert data_reader.annotations == file_annotations

        # Block-index timestamps are stored as protobuf Timestamps built
        # from the nanosecond write time.
        expected_timestamp = Timestamp()
        expected_timestamp.FromNanoseconds(timestamp_nsec)

        assert data_reader.series_block_index(
            0).block_entries[0].timestamp == expected_timestamp
        assert data_reader.series_block_index(
            0).block_entries[0].additional_indexes[0] == 1
        assert data_reader.series_block_index(
            0).block_entries[0].additional_indexes[1] == 2

        # Check that there are 3 series in the file.
        assert len(data_reader.file_index.series_identifiers) == 3

        # Read generic message data from the file.
        series_a_index = data_reader.series_spec_to_index(series1_spec)
        assert data_reader.num_data_blocks(series_a_index) == 1
        assert data_reader.total_bytes(series_a_index) == len(msg_data)
        _desc, timestamp_, data_ = data_reader.read(series_a_index, 0)
        assert timestamp_ == timestamp_nsec
        assert data_ == msg_data
        assert _desc.timestamp == expected_timestamp
        assert _desc.additional_indexes[0] == 1
        assert _desc.additional_indexes[1] == 2

        # Read a protobuf from the file.
        proto_reader = ProtobufReader(data_reader)
        operator_comment_reader = ProtobufChannelReader(
            proto_reader, OperatorComment)
        assert operator_comment_reader.num_messages == 1
        timestamp_, protobuf = operator_comment_reader.get_message(0)
        assert protobuf == operator_comment
        assert timestamp_ == timestamp_to_nsec(operator_comment.timestamp)

        # Read POD (float) data from the file.
        # A reader constructed with a bogus series spec must be rejected.
        with pytest.raises(ValueError):
            pod_reader = PodSeriesReader(data_reader, {'spec': 'bogus'})
        pod_reader = PodSeriesReader(data_reader, pod_spec)
        assert pod_reader.pod_type.pod_type == bddf.TYPE_FLOAT32
        assert pod_reader.series_descriptor.annotations['units'] == 'm/s^2'
        assert pod_reader.num_data_blocks == 1

        timestamp_, samples = pod_reader.read_samples(0)
        assert timestamp_ == timestamp_nsec
        assert samples == [float(val) for val in range(10, 20)]

    # Re-read the same file with the streaming reader: blocks come back in
    # write order, then EOF is raised.
    with open(filename,
              'rb') as infile, StreamDataReader(infile) as data_reader:
        # Check the file version number.
        assert data_reader.version.major_version == 1
        assert data_reader.version.minor_version == 0
        assert data_reader.version.patch_level == 0
        assert data_reader.annotations == file_annotations

        desc_, sdesc_, data_ = data_reader.read_data_block()
        assert desc_.timestamp == expected_timestamp
        assert desc_.additional_indexes[0] == 1
        assert desc_.additional_indexes[1] == 2
        assert sdesc_.message_type.content_type == series1_content_type
        assert sdesc_.message_type.type_name == 'test_type'
        assert data_ == msg_data

        desc_, sdesc_, data_ = data_reader.read_data_block()
        assert desc_.timestamp == operator_comment.timestamp
        assert sdesc_.message_type.content_type == 'application/protobuf'
        assert sdesc_.message_type.type_name == OperatorComment.DESCRIPTOR.full_name
        dec_msg = OperatorComment()
        dec_msg.ParseFromString(data_)
        assert dec_msg == operator_comment

        desc_, sdesc_, data_ = data_reader.read_data_block()
        assert desc_.timestamp == expected_timestamp
        assert sdesc_.pod_type.pod_type == bddf.TYPE_FLOAT32

        assert not data_reader.eof

        with pytest.raises(EOFError):
            data_reader.read_data_block()

        assert data_reader.eof

        # Check that there are 3 series in the file.
        assert len(data_reader.file_index.series_identifiers) == 3

        assert data_reader.series_block_indexes[0].block_entries[
            0].timestamp == expected_timestamp
        assert data_reader.series_block_index(
            0).block_entries[0].additional_indexes[0] == 1
        assert data_reader.series_block_index(
            0).block_entries[0].additional_indexes[1] == 2

        assert (data_reader.file_index.series_identifiers ==
                data_reader.stream_file_index.series_identifiers)

    os.unlink(filename)
def test_dt2ts():
    """dt2ts turns a naive datetime into a protobuf Timestamp; None passes through."""
    parsed = datetime.strptime('21/11/06 16:30', '%d/%m/%y %H:%M')

    assert dt2ts(None) is None
    assert dt2ts(parsed) == Timestamp(seconds=1164126600)
Example #9
0
 def convertIntoTimestamp(self, old_date):
     """Convert a calendar date into a protobuf Timestamp at midnight.

     Args:
         old_date: a datetime.date giving the calendar day.

     Returns:
         google.protobuf.timestamp_pb2.Timestamp for 00:00:00 on old_date.
     """
     ts = Timestamp()
     # Combine the date with midnight to obtain a full datetime.
     new_date = datetime.combine(old_date, datetime.min.time())
     # Fix: the protobuf API method is FromDatetime (lowercase 't'), as
     # used elsewhere in this codebase; FromDateTime raised AttributeError.
     ts.FromDatetime(new_date)
     return ts
Example #10
0
def datetime_to_timestamp(dt):
    """Return a google.protobuf Timestamp equivalent to datetime *dt*."""
    stamp = Timestamp()
    stamp.FromDatetime(dt)
    return stamp
Example #11
0
def now_timestamp():
    """Return the current system-clock time as a google.protobuf.Timestamp."""
    timestamp_proto = Timestamp()
    # Delegate nanosecond conversion to the shared helper.
    set_timestamp_from_nsec(timestamp_proto, now_nsec())
    return timestamp_proto
def dt_to_pb(dt):
    """Convert a Python datetime into its protobuf Timestamp equivalent."""
    stamped = Timestamp()
    stamped.FromDatetime(dt)
    return stamped
import json
import grpc
from google.protobuf.timestamp_pb2 import Timestamp

import foobar_pb2
import foobar_pb2_grpc

channel = grpc.insecure_channel("localhost:4040")

stub = foobar_pb2_grpc.FoobarHandlerStub(channel)

# Timestamp.GetCurrentTime() mutates the message in place and returns None,
# so its return value must never be passed as a field value — doing so left
# CreatedAt/UpdatedAt unset. Populate the timestamp first, then pass it.
timestamp = Timestamp()
timestamp.GetCurrentTime()

# Store Foobar
fb = foobar_pb2.Foobar(
    FoobarContent="Python as Client here!!!",
    CreatedAt=timestamp,
    UpdatedAt=timestamp,
)
created_foobar = stub.Store(fb)
print("\nCreated Foobars\n")
print(created_foobar)

# Get All Foobars
get_list_foobar = foobar_pb2.FetchRequest(num=1)

list_foobar = stub.GetListFoobar(get_list_foobar)

print("Foobars\n")
for f_bar in list_foobar.Foobars:
    print(f_bar)
Example #14
0
 def schedule_time_proto(self) -> Timestamp:
     """Return self.schedule_time converted to a protobuf Timestamp."""
     stamp = Timestamp()
     stamp.FromDatetime(self.schedule_time)
     return stamp
Example #15
0
    def GetOnlineFeatures(self, request: GetOnlineFeaturesRequest, context):
        """Return a canned GetOnlineFeaturesResponse for feature_set_1.

        The response holds one feature data set containing three identical
        FeatureRows; each row carries feature_1..feature_3 with float
        value 1.2 and a default (epoch) event timestamp.
        """

        def _build_row():
            # One FeatureRow with three float fields, all set to 1.2.
            return FeatureRowProto.FeatureRow(
                feature_set="feature_set_1",
                event_timestamp=Timestamp(),
                fields=[
                    FieldProto.Field(
                        name="feature_%d" % index,
                        value=ValueProto.Value(float_val=1.2),
                    ) for index in (1, 2, 3)
                ],
            )

        return GetOnlineFeaturesResponse(feature_data_sets=[
            GetOnlineFeaturesResponse.FeatureDataSet(
                name="feature_set_1",
                version="1",
                feature_rows=[_build_row() for _ in range(3)],
            )
        ])