def test_raises_if_cant_stay_in_limit(self):
    data = 'a' * 4000000
    message = RecordMessage(stream='colors', record=data)
    with self.assertRaisesRegex(target_stitch.BatchTooLargeException,
                                re.compile('batch size limit of 4 Mb')):
        target_stitch.serialize([message], self.schema, self.key_names,
                                4000000)

Example #2

def test_raises_if_cant_stay_in_limit(self):
    data = 'a' * 21000000
    message = RecordMessage(stream='colors', record=data)
    with self.assertRaisesRegex(
            target_stitch.BatchTooLargeException,
            re.compile('the Stitch API limit of 20 Mb')):
        target_stitch.serialize([message], self.schema, self.key_names,
                                self.bookmark_names, 4000000,
                                target_stitch.DEFAULT_MAX_BATCH_RECORDS)
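
The two examples above target different releases of target_stitch: the first calls serialize with four arguments (messages, schema, key names, and a byte limit), while the second adds bookmark names and a maximum record count. As a hedged sketch only, the wrapper below spells out what each positional argument in the newer call appears to mean; the parameter names are assumptions inferred from the call sites, not taken from target_stitch itself.

# Hypothetical wrapper that names the positional arguments of the newer
# serialize call; the names are assumptions inferred from the tests above.
def serialize_records(messages, schema, key_names, bookmark_names,
                      max_batch_bytes=4000000,
                      max_batch_records=target_stitch.DEFAULT_MAX_BATCH_RECORDS):
    # serialize returns a list of JSON-encoded batch strings
    # (see Example #3 below, which indexes the result with [0]).
    return target_stitch.serialize(messages, schema, key_names,
                                   bookmark_names, max_batch_bytes,
                                   max_batch_records)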

Example #3

def test_serialize_time_extracted(self):
    """ Test that we're not corrupting timestamps with cross platform parsing. (Test case for OSX, specifically) """
    expected = "1970-01-01T03:45:23.000000Z"
    test_time = datetime.datetime(1970, 1, 1, 3, 45, 23, tzinfo=pytz.utc)

    record = [RecordMessage("greetings", {"greeting": "hi"}, time_extracted=test_time)]
    schema = '{"type": "object", "properties": {"greeting": {"type": "string"}}}'
    batch = target_stitch.serialize(record, schema, [], [], 1000, target_stitch.DEFAULT_MAX_BATCH_RECORDS)[0]
    actual = json.loads(batch)["messages"][0]["time_extracted"]

    self.assertEqual(expected, actual)

Example #4

def serialize_with_limit(self, limit):
    return target_stitch.serialize(self.messages, self.schema,
                                   self.key_names, limit)

Example #5

def serialize_with_limit(self, limit):
    return target_stitch.serialize(self.messages, self.schema,
                                   self.key_names, self.bookmark_names,
                                   limit,
                                   target_stitch.DEFAULT_MAX_BATCH_RECORDS)
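
All of these snippets are methods on a unittest.TestCase and lean on fixtures (self.schema, self.key_names, self.bookmark_names, self.messages) defined elsewhere in the class. Below is a minimal sketch of the harness they assume; the fixture values and the RecordMessage import path are illustrative and may differ from the actual test suite.

import datetime
import json
import re
import unittest

import pytz
from singer.messages import RecordMessage  # import path assumed

import target_stitch


class SerializeTestCase(unittest.TestCase):
    """Illustrative harness for the serialize examples above."""

    def setUp(self):
        # Placeholder fixtures, not the repo's actual values.
        self.schema = ('{"type": "object", '
                       '"properties": {"color": {"type": "string"}}}')
        self.key_names = ['color']
        self.bookmark_names = []
        self.messages = [
            RecordMessage(stream='colors', record={'color': 'blue'}),
            RecordMessage(stream='colors', record={'color': 'red'}),
        ]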