Example #1
def example_1():
    classroom = get_or_create_classroom(STREAM_TYPE_STRING)

    student = {
        'name': 'Bob',
        'student_id': 12,
        'age': 21,
    }
    datapoint = DataPoint(data=json.dumps(student))
    classroom.write(datapoint)

    students = [{
        'name': 'James',
        'student_id': 13,
        'age': 22,
    }, {
        'name': 'Henry',
        'student_id': 14,
        'age': 20,
    }]
    datapoints = [DataPoint(data=json.dumps(x)) for x in students]
    classroom.bulk_write_datapoints(datapoints)

    most_recent_dp = classroom.get_current_value()
    print(json.loads(most_recent_dp.get_data())['name'])
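
The get_or_create_classroom helper is not defined in this example. A minimal sketch of what it might look like, reusing the dc.streams calls from the later examples (the stream id and credentials are placeholders):

def get_or_create_classroom(data_type):
    # Hypothetical helper (not shown in the original): reuse the stream if it
    # exists, otherwise create it.
    from devicecloud import DeviceCloud
    dc = DeviceCloud('user', 'password')  # placeholder credentials
    classroom = dc.streams.get_stream_if_exists("classroom")
    if classroom is None:
        classroom = dc.streams.create_stream(
            stream_id="classroom",
            data_type=data_type,
            description='a stream holding student records',
        )
    return classroom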
Example #3
    def test_json_encode_to_xml(self):
        my_dict = {'key1': 'value1', '2': 2, 'key3': [1, 2, 3]}
        dp = DataPoint(
            data_type=STREAM_TYPE_JSON,
            data=my_dict,
        )
        xml = dp.to_xml()

        self.assertIsNotNone(
            re.search(r'<data>\{[ ",a-zA-Z0-9:[\]]+\}</data>', xml))
        self.assertIsNotNone(re.search('"key1": "value1"', xml))
        self.assertIsNotNone(re.search('"2": 2', xml))
        self.assertIsNotNone(re.search('"key3": \[1, 2, 3\]', xml))
Example #4
 def test_get_data_no_conversion(self):
     # This proves that the DSTREAM_TYPE_MAP conversion methods are no longer
     # invoked by get_data.  Previously, already-converted data could be
     # converted a second time.  For most types that is harmless (e.g. calling
     # float on a float), but some conversions raise errors when run on their
     # own results.
     old_float_conversion = DSTREAM_TYPE_MAP[STREAM_TYPE_FLOAT]
     mfloat = mock.Mock(side_effect=float)
     DSTREAM_TYPE_MAP[STREAM_TYPE_FLOAT] = (mfloat, str)
     my_float = 3.14159265358
     dp = DataPoint(data_type=STREAM_TYPE_FLOAT, data=my_float, quality=0)
     self.assertEqual(my_float, dp.get_data())
     self.assertFalse(mfloat.called)
     DSTREAM_TYPE_MAP[STREAM_TYPE_FLOAT] = old_float_conversion
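
A more defensive variant of the monkeypatch above (a sketch, not the original test): try/finally guarantees DSTREAM_TYPE_MAP is restored even if an assertion fails mid-test.

 def test_get_data_no_conversion_safer(self):
     old_conversion = DSTREAM_TYPE_MAP[STREAM_TYPE_FLOAT]
     mfloat = mock.Mock(side_effect=float)
     DSTREAM_TYPE_MAP[STREAM_TYPE_FLOAT] = (mfloat, str)
     try:
         dp = DataPoint(data_type=STREAM_TYPE_FLOAT, data=3.14159265358, quality=0)
         self.assertEqual(3.14159265358, dp.get_data())
         self.assertFalse(mfloat.called)
     finally:
         DSTREAM_TYPE_MAP[STREAM_TYPE_FLOAT] = old_conversion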
Example #5
def create_and_use_json_stream(dc):
    # get a test stream reference
    test_stream = dc.streams.get_stream_if_exists("test-json")

    # we want a clean stream to work with.  If the stream exists, nuke it
    if test_stream is not None:
        test_stream.delete()

    test_stream = dc.streams.create_stream(
        stream_id="test-json",
        data_type=STREAM_TYPE_JSON,
        description='a stream used for testing json',
        units='international json standard unit (IJSU)',
    )

    test_stream.write(
        DataPoint(
            data_type=STREAM_TYPE_JSON,
            data={
                'key1': 'value1',
                2: 2,
                'key3': [1, 2, 3]
            },
            description="Some JSON data in IJSUs",
        ))

    time.sleep(5)

    print(test_stream.get_current_value())

    test_stream.delete()
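
Note that the JSON encoder coerces the integer key 2 above to the string "2" on the wire, which is consistent with the '"2": 2' assertion in the to_xml test earlier:

import json
assert json.dumps({2: 2}) == '{"2": 2}'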
Example #6
def test_http_monitor(dc):
    # Create a fresh monitor over a pretty broad set of topics
    topics = ['DeviceCore', 'FileDataCore', 'FileData', 'DataPoint']
    mon = dc.monitor.get_monitor(topics)
    if mon is not None:
        mon.delete()
    mon = dc.monitor.create_http_monitor(topics,
                                         'http://digi.com',
                                         transport_token=None,
                                         transport_method='PUT',
                                         connect_timeout=0,
                                         response_timeout=0,
                                         batch_size=1,
                                         batch_duration=0,
                                         compression='none',
                                         format_type='json')
    pprint.pprint(mon.get_metadata())

    # NOTE: an HTTP monitor pushes events to the URL given above via HTTP PUT;
    # this local listener is never registered and is shown for illustration only.
    def listener(data):
        pprint.pprint(data)
        return True  # we got it!

    test_stream = dc.streams.get_stream("test")
    try:
        while True:
            test_stream.write(DataPoint(random.random()))
            time.sleep(3.14)
    except KeyboardInterrupt:
        print("Shutting down threads...")
def create_stream_and_delete(dc):
    # get a test stream reference
    test_stream = dc.streams.get_stream_if_exists("test")

    # we want a clean stream to work with.  If the stream exists, nuke it
    if test_stream is not None:
        test_stream.delete()

    test_stream = dc.streams.create_stream(
        stream_id="test",
        data_type='float',
        description='a stream used for testing',
        units='some-unit',
    )

    for i in range(5):
        test_stream.write(
            DataPoint(data=i * pi, description="This is {} * pi".format(i)))

    for i, dp in enumerate(test_stream.read()):
        print("{}, {!r}".format(i + 1, dp))

    input("We wrote some points to the cloud, go check it out!")

    # now cleanup by deleting the stream
    test_stream.delete()
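
A hypothetical driver for these standalone example functions (assumption: each takes an authenticated DeviceCloud instance; the credentials are placeholders):

if __name__ == '__main__':
    from devicecloud import DeviceCloud
    dc = DeviceCloud('user', 'password')
    create_stream_and_delete(dc)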
Example #8
    def test_write_full(self):
        self.prepare_response("POST",
                              "/ws/DataPoint/test",
                              CREATE_DATAPOINT_RESPONSE,
                              status=201)
        test_stream = self._get_stream(GET_TEST_DATA_STREAM)
        test_stream.write(
            DataPoint(
                data=123.4,
                description="Best Datapoint Ever?",
                timestamp=datetime.datetime(2014, 7, 7, 14, 10, 34),
                quality=99,
                location=(99, 88, 77),
                units="scolvilles",
            ))

        # verify that the body sent to the device cloud is sufficiently minimal
        self.assertEqual(
            httpretty.last_request().body,
            six.b('<DataPoint>'
                  '<streamId>test</streamId>'
                  '<data>123.4</data>'
                  '<description>Best Datapoint Ever?</description>'
                  '<timestamp>2014-07-07T14:10:34Z</timestamp>'  # TODO: does this need to include tz?
                  '<quality>99</quality>'
                  '<location>99,88,77</location>'
                  '<streamUnits>scolvilles</streamUnits>'
                  '</DataPoint>'))
Example #10
 def test_rollup_datapoint(self):
     self.prepare_response("GET", "/ws/DataStream/test",
                           GET_TEST_DATA_STREAM)
     example_json = {
         "id": "07d77854-0557-11e4-ab44-fa163e7ebc6b",
         "timestamp": "1404683207981",
         "timestampISO": "2014-07-06T21:46:47.981Z",
         "serverTimestamp": "1404683207981",
         "serverTimestampISO": "2014-07-06T21:46:47.981Z",
         "data": "0.0",
         "description": "Test",
         "quality": "20",
         "location": "1.0,2.0,3.0"
     }
     stream = self._get_stream("test", with_cached_data=True)
     dp = DataPoint.from_rollup_json(stream, example_json)
     self.assertEqual(dp.get_data(), 0.0)
     orig_dt = dp.get_timestamp()
     dt_wo_ms = datetime.datetime(year=orig_dt.year,
                                  month=orig_dt.month,
                                  day=orig_dt.day,
                                  hour=orig_dt.hour,
                                  minute=orig_dt.minute,
                                  second=orig_dt.second,
                                  tzinfo=orig_dt.tzinfo)
     self.assertEqual(six.b(dt_wo_ms.isoformat()),
                      six.b('2014-07-06T21:46:47+00:00'))
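
The millisecond-stripping above can be written more compactly with datetime.replace, which preserves tzinfo (a sketch, not part of the original test):

     dt_wo_ms = orig_dt.replace(microsecond=0)
     self.assertEqual(dt_wo_ms.isoformat(), '2014-07-06T21:46:47+00:00')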
Example #11
    def test_basic_nonbulk_stream_operations(self):
        #
        # This test verifies that we can perform a number of simple operations on
        # a data stream (non-bulk).  The ops are create, write, read, and delete
        #
        SID = "pythondc-inttest/test_basic_nonbulk_stream_operations"

        # get a test stream reference
        test_stream = self._dc.streams.get_stream_if_exists(SID)

        # we want a clean stream to work with.  If the stream exists, nuke it
        if test_stream is not None:
            test_stream.delete()

        test_stream = self._dc.streams.create_stream(
            stream_id=SID,
            data_type='float',
            description='a stream used for testing',
            units='some-unit',
        )

        for i in range(5):
            test_stream.write(
                DataPoint(data=i * pi,
                          description="This is {} * pi".format(i)))

        for i, dp in enumerate(test_stream.read(newest_first=False)):
            self.assertAlmostEqual(dp.get_data(), i * pi)

        # now cleanup by deleting the stream
        test_stream.delete()
Example #12
    def test_bulk_write_datapoints_multiple_streams(self):
        #
        # This test verifies that we can bulk-write a batch of datapoints to
        # several datastreams and read them back.
        #
        SID_FMT = "pythondc-inttest/test_bulk_write_datapoints_multiple_streams-{}"
        datapoints = []
        dt = datetime.datetime.now()
        for i in range(300):
            datapoints.append(
                DataPoint(
                    stream_id=SID_FMT.format(i % 3),
                    data_type=STREAM_TYPE_INTEGER,
                    units="meters",
                    timestamp=dt - datetime.timedelta(seconds=300 - i),
                    data=i,
                ))

        # remove any existing data before starting out
        for i in range(3):
            s = self._dc.streams.get_stream_if_exists(SID_FMT.format(i))
            if s:
                s.delete()

        self._dc.streams.bulk_write_datapoints(datapoints)

        for i in range(3):
            stream = self._dc.streams.get_stream(SID_FMT.format(i))
            for j, dp in enumerate(stream.read(newest_first=False)):
                self.assertEqual(dp.get_data(), j * 3 + i)
            stream.delete()
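
A quick sanity check of the layout asserted above: datapoint i is written to stream i % 3, so stream s reads back s, s + 3, s + 6, ... in ascending timestamp order, matching dp.get_data() == j * 3 + i.

for s in range(3):
    assert [j * 3 + s for j in range(3)] == [s, s + 3, s + 6]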
Example #14
def write_points_and_delete_some(dc):
    # get a test stream reference
    test_stream = dc.streams.get_stream_if_exists("test")

    # we want a clean stream to work with.  If the stream exists, nuke it
    if test_stream is not None:
        test_stream.delete()

    test_stream = dc.streams.create_stream(
        stream_id="test",
        data_type='float',
        description='a stream used for testing',
        units='some-unit',
    )

    print("Writing data points with five second delay")
    for i in range(5):
        print("Writing point {} / 5".format(i + 1))
        test_stream.write(
            DataPoint(data=i * 1000, description="This is {} * pi".format(i)))
        if i < (5 - 1):
            time.sleep(1)

    points = list(test_stream.read(newest_first=False))
    print("Read {} data points, removing the first".format(len(points)))

    # Remove the first
    test_stream.delete_datapoint(points[0])
    points = list(test_stream.read(newest_first=False))
    print(
        "Read {} data points, removing all but the first by time range".format(
            len(points)))

    # delete everything except the first remaining point
    test_stream.delete_datapoints_in_time_range(
        start_dt=points[1].get_timestamp(), end_dt=points[-1].get_timestamp())
    points = list(test_stream.read(newest_first=False))
    print("Read {} data points.  Will try to delete all next".format(
        len(points)))
    pprint.pprint(points)

    # let's try without any range at all and see if they all get deleted
    test_stream.delete_datapoints_in_time_range()
    points = list(test_stream.read(newest_first=False))
    print("Read {} data points".format(len(points)))

    test_stream.delete()
Example #15
def fill_classroom_with_student_ids(classroom):
    # fake data with wide range of timestamps
    now = time.time()
    one_day_in_seconds = 86400

    datapoints = list()
    for student_id in range(100):
        deviation = random.randint(0, one_day_in_seconds)
        random_time = now + deviation
        datapoint = DataPoint(data=student_id,
                              timestamp=datetime.datetime.fromtimestamp(random_time),
                              data_type=STREAM_TYPE_INTEGER)
        datapoints.append(datapoint)

    classroom.bulk_write_datapoints(datapoints)
Example #16
    def test_write_simple(self):
        self.prepare_response("POST",
                              "/ws/DataPoint/test",
                              CREATE_DATAPOINT_RESPONSE,
                              status=201)
        test_stream = self._get_stream(GET_TEST_DATA_STREAM)
        test_stream.write(DataPoint(data=123.4))

        # verify that the body sent to the device cloud is sufficiently minimal
        self.assertEqual(
            httpretty.last_request().body,
            six.b('<DataPoint>'
                  '<streamId>test</streamId>'
                  '<data>123.4</data>'
                  '</DataPoint>'))
Example #18
    def test_event_reception(self):
        rx = []

        def receive_notification(notification):
            rx.append(notification)
            return True

        topics = ['DataPoint', 'FileData']
        monitor = self._dc.monitor.get_monitor(topics)
        if monitor:
            monitor.delete()
        monitor = self._dc.monitor.create_tcp_monitor(topics)
        monitor.add_callback(receive_notification)

        self._dc.filedata.write_file("/~/inttest/monitor_tcp/", "test_file.txt", six.b("Hello, world!"), "text/plain")
        self._dc.streams.get_stream("inttest/monitor_tcp").write(DataPoint(10))

        # Wait for the events to come in from the cloud
        time.sleep(3)
        self._dc.monitor.stop_listeners()

        try:
            fd_push_seen = False
            dp_push_seen = False
            for rec in rx:
                msg = rec['Document']['Msg']
                fd = msg.get('FileData', None)
                if fd and 'id' in fd:
                    if (fd['id']['fdName'] == 'test_file.txt' and
                            fd['id']['fdPath'] == '/db/7603_Digi/inttest/monitor_tcp/'):
                        fd_push_seen = True
                # else:
                #     print('id not in test_event_reception/fd: {}'.format(rx))
                dp = msg.get('DataPoint')
                if dp and 'streamId' in dp:
                    print('test_event_reception/dp: {}'.format(dp))
                    if dp['streamId'] == 'inttest/monitor_tcp':
                        dp_push_seen = True
                # else:
                #     print('streamId not in test_event_reception/dp: {}'.format(rx))
            self.assertTrue(fd_push_seen)
            self.assertTrue(dp_push_seen)
        except:
            # add some additional debugging information
            pprint.pprint(rx)
            raise
Example #19
def bulk_write_datapoints_single_stream(dc):
    datapoints = []
    for i in range(300):
        datapoints.append(
            DataPoint(
                data_type=STREAM_TYPE_INTEGER,
                units="meters",
                data=i,
            ))

    stream = dc.streams.get_stream("my/test/bulkstream")
    stream.bulk_write_datapoints(datapoints)
    print("---" + stream.get_stream_id() + "---")
    print(" ".join(
        str(dp.get_data()) for dp in stream.read(newest_first=False)))
    print("")
    stream.delete()
Example #20
    def test_from_json_conversion(self):
        stream = self._get_stream("test", with_cached_data=False)
        self.prepare_response("GET", "/ws/DataStream/test",
                              GET_TEST_DATA_STREAM)
        test_json_data = {
            six.u('description'): six.u('Test'),
            six.u('quality'): six.u('20'),
            six.u('timestamp'): six.u('1404683207981'),
            six.u('data'): six.u('3.14159265358'),
            six.u('serverTimestampISO'): six.u('2014-07-06T21:46:47.981Z'),
            six.u('location'): six.u('1.0,2.0,3.0'),
            six.u('timestampISO'): six.u('2014-07-06T21:46:47.981Z'),
            six.u('serverTimestamp'): six.u('1404683207981'),
            six.u('id'): six.u('07d77854-0557-11e4-ab44-fa163e7ebc6b')
        }

        dp = DataPoint.from_json(stream, test_json_data)
        self.assertEqual(3.14159265358, dp.get_data())
Example #21
def bulk_write_datapoints_multiple_streams(dc):
    datapoints = []
    for i in range(300):
        datapoints.append(
            DataPoint(
                stream_id="my/stream%d" % (i % 3),
                data_type=STREAM_TYPE_INTEGER,
                units="meters",
                data=i,
            ))
    dc.streams.bulk_write_datapoints(datapoints)

    for stream in dc.streams.get_streams():
        if stream.get_stream_id().startswith('my/stream'):
            print("---" + stream.get_stream_id() + "---")
            print(" ".join(
                str(dp.get_data()) for dp in stream.read(newest_first=False)))
            print("")
            stream.delete()
Example #22
    def test_bulk_write_datapoints_single_stream(self):
        #
        # This test verifies that we can bulk-write a batch of datapoints to a
        # single stream and read them back.
        #
        datapoints = []
        dt = datetime.datetime.now()
        for i in range(300):
            datapoints.append(
                DataPoint(
                    data_type=STREAM_TYPE_INTEGER,
                    units="meters",
                    timestamp=dt - datetime.timedelta(seconds=300 - i),
                    data=i,
                ))

        stream = self._dc.streams.get_stream_if_exists(
            "pythondc-inttest/test_bulk_write_datapoints_single_stream")
        if stream:
            stream.delete()

        stream = self._dc.streams.get_stream(
            "pythondc-inttest/test_bulk_write_datapoints_single_stream")
        stream.bulk_write_datapoints(datapoints)
        stream_contents_asc = list(stream.read(newest_first=False))
        self.assertEqual(len(stream_contents_asc), 300)
        for i, dp in enumerate(stream_contents_asc):
            self.assertEqual(dp.get_units(), "meters")
            self.assertEqual(dp.get_data_type(), STREAM_TYPE_INTEGER)
            self.assertEqual(dp.get_data(), i)
            self.assertEqual(
                dp.get_stream_id(),
                "pythondc-inttest/test_bulk_write_datapoints_single_stream")
            self.assertEqual(dp.get_location(), None)
            self.assertEqual(dp.get_description(), "")
            self.assertIsInstance(dp.get_server_timestamp(), datetime.datetime)
            self.assertIsInstance(dp.get_id(), six.string_types)
            self.assertEqual(dp.get_quality(), 0)
            self.assertIsInstance(dp.get_timestamp(), datetime.datetime)

        # Cleanup by deleting the stream
        stream.delete()
Example #23
    def test_bulk_write_multiple_pages(self):
        # Actual response has a ton of locations for the new data points
        requests = []

        def handle_request(request, uri, headers):
            requests.append(request)
            return (
                200, headers,
                '<?xml version="1.0" encoding="ISO-8859-1"?><result></result>')

        self.prepare_response("POST", "/ws/DataPoint", handle_request)
        datapoints = []
        for i in range(300):
            datapoints.append(
                DataPoint(
                    stream_id="my/stream%d" % (i % 3),
                    data_type=STREAM_TYPE_INTEGER,
                    units="meters",
                    data=i,
                ))
        self.dc.streams.bulk_write_datapoints(datapoints)
        self.assertEqual(len(requests), 2)

        def parse_for_data(response):
            root = ET.fromstring(response)
            return [int(x.text) for x in root.iter('data')]

        def parse_for_stream_id(response):
            root = ET.fromstring(response)
            return set(x.text for x in root.iter('streamId'))

        self.assertEqual(parse_for_data(requests[0].body), list(range(250)))
        self.assertEqual(parse_for_data(requests[1].body),
                         list(range(250, 300)))
        self.assertEqual(parse_for_stream_id(requests[0].body),
                         {'my/stream0', 'my/stream1', 'my/stream2'})
        self.assertEqual(parse_for_stream_id(requests[1].body),
                         {'my/stream0', 'my/stream1', 'my/stream2'})
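
The assertions above imply that bulk_write_datapoints pages its uploads at 250 points per POST, so 300 points produce two requests:

points_total, page_size = 300, 250
assert -(-points_total // page_size) == 2  # ceiling division: pages of 250 and 50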
Example #24
def test_tcp_monitor(dc):
    # Create a fresh monitor over a pretty broad set of topics
    topics = ['DeviceCore', 'FileDataCore', 'FileData', 'DataPoint']
    mon = dc.monitor.get_monitor(topics)
    if mon is not None:
        mon.delete()
    mon = dc.monitor.create_tcp_monitor(topics)
    pprint.pprint(mon.get_metadata())

    def listener(data):
        pprint.pprint(data)
        return True  # we got it!

    mon.add_callback(listener)

    test_stream = dc.streams.get_stream("test")
    try:
        while True:
            test_stream.write(DataPoint(random.random()))
            time.sleep(3.14)
    except KeyboardInterrupt:
        print("Shutting down threads...")

    dc.monitor.stop_listeners()
Example #25
 def test_set_bad_timestamp(self):
     dp = DataPoint(123)
     self.assertRaises(ValueError, dp.set_timestamp,
                       "abcdefg")  # not parseable by arrow
     self.assertRaises(TypeError, dp.set_timestamp, 12345)
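
By contrast, a string that arrow can parse should be accepted (a sketch, not part of the original test):

dp = DataPoint(123)
dp.set_timestamp("2014-07-07T14:10:34Z")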
Example #26
 def test_type_checking(self):
     dp = DataPoint(123)
     self.assertRaises(TypeError, dp.set_description, 5)
     self.assertRaises(TypeError, dp.set_stream_id, [1, 2, 3])
Example #27
 def test_set_quality_float(self):
     # cover truncation case for code coverage
     dp = DataPoint(123)
     dp.set_quality(99.5)
     self.assertEqual(dp.get_quality(), 99)
Example #28
 def test_set_location_valid(self):
     dp = DataPoint(123)
     dp.set_location((4, 5, 6))
     self.assertEqual(dp.get_location(), (4.0, 5.0, 6.0))
Example #30
 def test_bulk_write_datapoints_not_a_list_of_datapoints(self):
     self.assertRaises(TypeError, self.dc.streams.bulk_write_datapoints,
                       [5, DataPoint(123)])
Example #31
 def test_bulk_write_datapoints_not_a_list(self):
     # should be passing a list but we are just giving it a datapoint
     stream = self._get_stream("test")
     self.assertRaises(TypeError, stream.bulk_write_datapoints,
                       DataPoint(123))
Example #32
 def test_bulk_write_datapoints_not_a_list_of_datapoints(self):
     # should be passing a list of just datapoints, but we snuck in a 5!
     stream = self._get_stream("test")
     self.assertRaises(TypeError, stream.bulk_write_datapoints,
                       [5, DataPoint(123)])
Example #34
 def test_bad_location_string(self):
     dp = DataPoint(123)
     self.assertRaises(ValueError, dp.set_location, "0,1")
     self.assertRaises(ValueError, dp.set_location, "0,1,2,3")
     self.assertRaises(ValueError, dp.set_location, "bad-input")
Example #35
 def test_bad_location_type(self):
     dp = DataPoint(123)
     self.assertRaises(TypeError, dp.set_location, datetime.datetime.now())
Example #36
 def test_bulk_write_datapoints_datapoint_has_no_stream_id(self):
     self.assertRaises(ValueError, self.dc.streams.bulk_write_datapoints,
                       [DataPoint(123)])
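
The ValueError above suggests that account-level bulk writes require every DataPoint to carry an explicit stream_id, unlike the stream-level writes in the earlier examples (the stream id below is a placeholder):

dc.streams.bulk_write_datapoints([DataPoint(data=123, stream_id="my/stream0")])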