def test_put_blob_record_success(self):
    project_name = 'put'
    topic_name = 'success'
    records = []
    data = None
    with open(os.path.join(_TESTS_PATH, '../resources/datahub.png'), 'rb') as f:
        data = f.read()

    record0 = BlobRecord(blob_data=data)
    record0.shard_id = '0'
    records.append(record0)

    record1 = BlobRecord(blob_data=data)
    record1.hash_key = '4FFFFFFFFFFFFFFD7FFFFFFFFFFFFFFD'
    records.append(record1)

    record2 = BlobRecord(blob_data=data)
    record2.partition_key = 'TestPartitionKey'
    records.append(record2)

    def check(request):
        assert request.method == 'POST'
        assert request.url == 'http://endpoint/projects/put/topics/success/shards'
        content = json.loads(request.body)
        assert content['Action'] == 'pub'
        assert len(content['Records']) == 3
        assert base64.b64decode(content['Records'][0]['Data']) == data
        assert base64.b64decode(content['Records'][1]['Data']) == data
        assert base64.b64decode(content['Records'][2]['Data']) == data

    with HTTMock(gen_mock_api(check)):
        put_result = dh.put_records(project_name, topic_name, records)

    assert put_result.failed_record_count == 0
    assert put_result.failed_records == []
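# Illustration only: a minimal, hypothetical sketch of what a gen_mock_api helper
# like the one used above might look like. The actual helper is defined elsewhere
# in the test suite; the httmock wiring and the canned JSON body
# ('FailedRecordCount' / 'FailedRecords') are assumptions, not the SDK's confirmed
# response format.
def gen_mock_api(check):
    from httmock import all_requests, response

    @all_requests
    def mock_api(url, request):
        check(request)  # run the per-test request assertions
        body = json.dumps({'FailedRecordCount': 0, 'FailedRecords': []})
        return response(200, body, {'Content-Type': 'application/json'})

    return mock_api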
def test_put_blob_record_pb_success(self):
    project_name = 'put'
    topic_name = 'success'
    records = []
    data = None
    with open(os.path.join(_TESTS_PATH, '../resources/datahub.png'), 'rb') as f:
        data = f.read()

    record0 = BlobRecord(blob_data=data)
    record0.shard_id = '0'
    records.append(record0)

    record1 = BlobRecord(blob_data=data)
    record1.hash_key = '4FFFFFFFFFFFFFFD7FFFFFFFFFFFFFFD'
    records.append(record1)

    record2 = BlobRecord(blob_data=data)
    record2.partition_key = 'TestPartitionKey'
    records.append(record2)

    def check(request):
        assert request.method == 'POST'
        assert request.url == 'http://endpoint/projects/put/topics/success/shards'
        crc, compute_crc, pb_str = unwrap_pb_frame(request.body)
        pb_put_record_request = PutRecordsRequest()
        pb_put_record_request.ParseFromString(pb_str)
        for pb_record in pb_put_record_request.records:
            assert pb_record.data.data[0].value == data

    with HTTMock(gen_pb_mock_api(check)):
        put_result = dh2.put_records(project_name, topic_name, records)

    assert put_result.failed_record_count == 0
    assert put_result.failed_records == []
def test_put_blob_record_success(self):
    project_name = 'put'
    topic_name = 'success'
    records = []
    data = None
    with open(os.path.join(_TESTS_PATH, '../resources/datahub.png'), 'rb') as f:
        data = f.read()

    record0 = BlobRecord(blob_data=data)
    record0.shard_id = '0'
    records.append(record0)

    record1 = BlobRecord(blob_data=data)
    record1.hash_key = '4FFFFFFFFFFFFFFD7FFFFFFFFFFFFFFD'
    records.append(record1)

    record2 = BlobRecord(blob_data=data)
    record2.partition_key = 'TestPartitionKey'
    records.append(record2)

    with HTTMock(datahub_api_mock):
        put_result = dh.put_records(project_name, topic_name, records)

    assert put_result.failed_record_count == 0
    assert put_result.failed_records == []
print "topic type illegal!" sys.exit(-1) print "=======================================\n\n" shards = dh.list_shards(project_name, topic_name) for shard in shards: print shard print "=======================================\n\n" records = [] data = None with open('datahub.png', 'rb') as f: data = f.read() record0 = BlobRecord(blobdata=data) record0.shard_id = '0' records.append(record0) record1 = BlobRecord(blobdata=data) record1.shard_id = '1' records.append(record1) record2 = BlobRecord(blobdata=data) record2.shard_id = '2' records.append(record2) failed_indexs = dh.put_records(project_name, topic_name, records) print "put blob %d records, failed list: %s" %(len(records), failed_indexs) # failed_indexs如果非空最好对failed record再进行重试 print "=======================================\n\n"
    put_result = dh.put_records(project_name, tuple_topic_name, records0)
    print(put_result)
    print("put tuple %d records, failed count: %d" % (len(records0), put_result.failed_record_count))
    # If failed_record_count is greater than 0, it is best to retry the failed records
    print("=======================================\n\n")
except DatahubException as e:
    print(e)
    sys.exit(-1)

# ===================== put blob records =====================
try:
    records1 = []

    record3 = BlobRecord(blob_data=b'data')
    record3.shard_id = '0'
    record3.put_attribute('a', 'b')
    records1.append(record3)

    put_result = dh.put_records(project_name, blob_topic_name, records1)
    print(put_result)
except DatahubException as e:
    print(e)
    sys.exit(-1)

# ===================== get cursor =====================
shard_id = "0"
sequence = 0
time_stamp = int(time.time())

cursor_result0 = dh.get_cursor(project_name, tuple_topic_name, shard_id, CursorType.OLDEST)
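# A hedged consumption sketch: reading the blob records written above back from
# shard '0'. The get_blob_records call and the result attributes used here
# (record_count, records, blob_data) are assumptions about this client version;
# verify against your SDK before relying on them.
try:
    blob_cursor_result = dh.get_cursor(project_name, blob_topic_name, shard_id, CursorType.OLDEST)
    get_result = dh.get_blob_records(project_name, blob_topic_name, shard_id, blob_cursor_result.cursor, 10)
    print("get blob %d records" % get_result.record_count)
    for record in get_result.records:
        print(record.blob_data)
except DatahubException as e:
    print(e)
    sys.exit(-1)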