def deserialize(self, stream, content_type):
     """Read a recordio-protobuf stream from a SageMaker Semantic Segmentation algorithm endpoint
     Args:
         stream (botocore.response.StreamingBody): A stream of bytes.
         content_type (str): The MIME type of the data.
     Returns:
         array: numpy array of class probabilities per pixel
     """
     try:
         # Unpack the RecordIO wrapper first:
         reccontent = HackyProtobufDeserializer.next_recordio_record(stream)
         # Then load the protocol buffer:
         rec = Record()
         print("Parsing protobuf...")
         protobuf = rec.ParseFromString(reccontent)
         # Then read the two provided tensors `target` (predictions) and `shape`, squeeze out any batch
         # dimensions (since we'll always be predicting on a single image) and shape target appropriately:
         print("Fetching Tensors...")
         values = list(rec.features["target"].float32_tensor.values)
         shape = list(rec.features["shape"].int32_tensor.values)
         print("reshaping arrays...")
         shape = squeeze(shape)
         mask = reshape(array(values), shape)
         return squeeze(mask, axis=0)
     finally:
         stream.close()
Example #2
def read_records(file):
    """Eagerly read a collection of amazon Record protobuf objects from file."""
    records = []
    for record_data in read_recordio(file):
        record = Record()
        record.ParseFromString(record_data)
        records.append(record)
    return records
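read_records and the tests below rely on a read_recordio helper that is not shown in these snippets. Below is a minimal sketch, assuming the SageMaker RecordIO framing (a 4-byte magic number 0xCED7230A, a 4-byte record length, the payload, then padding to a 4-byte boundary); the helper name and constant mirror the SDK's encoding but are reproduced here as an assumption, not the library's exact code.

import struct

_KMAGIC = 0xCED7230A  # RecordIO magic number (assumed to match the SDK's encoding)

def read_recordio(f):
    """Yield each raw protobuf payload from a RecordIO-framed file object."""
    while True:
        header = f.read(4)
        if len(header) < 4:
            return  # clean end of stream
        (kmagic,) = struct.unpack("I", header)
        if kmagic != _KMAGIC:
            raise ValueError("not positioned at a RecordIO record boundary")
        (length,) = struct.unpack("I", f.read(4))
        data = f.read(length)
        # Payloads are padded so the next header starts on a 4-byte boundary.
        pad = (4 - (length % 4)) % 4
        if pad:
            f.read(pad)
        yield data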
def test_serializer():
    s = RecordSerializer()
    array_data = [[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]]
    buf = s.serialize(np.array(array_data))
    for record_data, expected in zip(read_recordio(buf), array_data):
        record = Record()
        record.ParseFromString(record_data)
        assert record.features["values"].float64_tensor.values == expected
def test_serializer_accepts_one_dimensional_array():
    s = RecordSerializer()
    array_data = [1.0, 2.0, 3.0]
    buf = s.serialize(np.array(array_data))
    record_data = next(read_recordio(buf))
    record = Record()
    record.ParseFromString(record_data)
    assert record.features["values"].float64_tensor.values == array_data
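For context, here is a minimal usage sketch of the RecordSerializer exercised by the two tests above. The import path is an assumption (recent SageMaker Python SDK releases expose it from sagemaker.serializers; older 2.x releases from sagemaker.amazon.common).

import numpy as np
from sagemaker.serializers import RecordSerializer  # assumption: import path varies by SDK version

serializer = RecordSerializer()  # content type: application/x-recordio-protobuf
buf = serializer.serialize(np.array([[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]]))
# `buf` is a file-like object of RecordIO-framed Record protobufs, one record per row,
# which is what read_recordio(buf) iterates over in test_serializer above.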
Example #5
def read_pipe(pipe):
    with open(pipe, 'rb') as f:
        for rec in read_recordio(f):
            print("read record")
            print(rec)
            record = Record()
            record.ParseFromString(rec)
            print("record parsed")
            print(record)
def test_int_write_numpy_to_dense_tensor():
    array_data = [[1, 2, 3], [10, 20, 3]]
    array = np.array(array_data)
    with tempfile.TemporaryFile() as f:
        write_numpy_to_dense_tensor(f, array)
        f.seek(0)
        for record_data, expected in zip(read_recordio(f), array_data):
            record = Record()
            record.ParseFromString(record_data)
            assert record.features["values"].int32_tensor.values == expected
def test_float32_write_numpy_to_dense_tensor():
    array_data = [[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]]
    array = np.array(array_data).astype(np.dtype("float32"))
    with tempfile.TemporaryFile() as f:
        write_numpy_to_dense_tensor(f, array)
        f.seek(0)
        for record_data, expected in zip(read_recordio(f), array_data):
            record = Record()
            record.ParseFromString(record_data)
            assert record.features["values"].float32_tensor.values == expected
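The tests above check which field write_numpy_to_dense_tensor fills per dtype (int32_tensor for integer arrays, float32_tensor for float32, float64_tensor for default floats). A single record built by hand, sketched under the assumption that Record comes from sagemaker.amazon.record_pb2, shows the layout being asserted.

from sagemaker.amazon.record_pb2 import Record  # assumption: the Record used throughout these snippets

record = Record()
record.features["values"].float32_tensor.values.extend([1.0, 2.0, 3.0])  # one dense row
record.label["values"].float32_tensor.values.append(99.0)                # optional label
payload = record.SerializeToString()  # this is what each RecordIO frame wraps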
Example #8
def test_float32_label():
    array_data = [[1, 2, 3], [10, 20, 3]]
    array = np.array(array_data)
    label_data = np.array([99, 98, 97]).astype(np.dtype('float32'))
    with tempfile.TemporaryFile() as f:
        write_numpy_to_dense_tensor(f, array, label_data)
        f.seek(0)
        for record_data, expected, label in zip(_read_recordio(f), array_data, label_data):
            record = Record()
            record.ParseFromString(record_data)
            assert record.features["values"].int32_tensor.values == expected
            assert record.label["values"].float32_tensor.values == [label]
Example #9
def test_dense_int_write_spmatrix_to_sparse_tensor():
    array_data = [[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]]
    keys_data = [[0, 1, 2], [0, 1, 2]]
    array = coo_matrix(np.array(array_data).astype(np.dtype('int')))
    with tempfile.TemporaryFile() as f:
        write_spmatrix_to_sparse_tensor(f, array)
        f.seek(0)
        for record_data, expected_data, expected_keys in zip(read_recordio(f), array_data, keys_data):
            record = Record()
            record.ParseFromString(record_data)
            assert record.features["values"].int32_tensor.values == expected_data
            assert record.features["values"].int32_tensor.keys == expected_keys
            assert record.features["values"].int32_tensor.shape == [len(expected_data)]
def test_dense_float64_spmatrix_to_sparse_label():
    array_data = [[1, 2, 3], [10, 20, 3]]
    keys_data = [[0, 1, 2], [0, 1, 2]]
    array = coo_matrix(np.array(array_data).astype("float64"))
    label_data = np.array([99, 98, 97])
    with tempfile.TemporaryFile() as f:
        write_spmatrix_to_sparse_tensor(f, array, label_data)
        f.seek(0)
        for record_data, expected_data, expected_keys, label in zip(
            read_recordio(f), array_data, keys_data, label_data
        ):
            record = Record()
            record.ParseFromString(record_data)
            assert record.features["values"].float64_tensor.values == expected_data
            assert record.features["values"].float64_tensor.keys == expected_keys
            assert record.label["values"].int32_tensor.values == [label]
            assert record.features["values"].float64_tensor.shape == [len(expected_data)]
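The sparse assertions above pair each row's non-zero values with keys (their column indices) and a shape (the row width). A sketch of one such row built by hand, using the same Record message (import path assumed as in the earlier sketch):

from sagemaker.amazon.record_pb2 import Record  # assumption, as above

record = Record()
tensor = record.features["values"].float64_tensor
tensor.values.extend([1.0, 3.0])  # non-zero entries of the row
tensor.keys.extend([0, 2])        # column indices of those entries
tensor.shape.append(3)            # width of the row (number of columns)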
Example #11
    def infer(self, filename, output_folder, endpoint, show_image):
        """
        :param filename:
        :param output_folder:
        :param endpoint:
        :param show_image:
        :return:
        """
        output_filename = os.path.splitext(filename)[0]
        output_filename = os.path.basename(output_filename)
        output_filename = os.path.join(output_folder, output_filename + '_inference.png')

        runtime = boto3.Session().client('sagemaker-runtime')

        image = pillow.Image.open(filename)
        image.thumbnail([800, 600], pillow.Image.LANCZOS)  # ANTIALIAS was removed in Pillow 10; LANCZOS is the same filter
        image.save(filename, "JPEG")

        with open(filename, 'rb') as f:
            payload = f.read()
            payload = bytearray(payload)

        response = runtime.invoke_endpoint(
            EndpointName=endpoint, ContentType='application/x-image', Body=payload
        )
        results_file = 'results.rec'
        with open(results_file, 'wb') as f:
            f.write(response['Body'].read())

        rec = Record()
        recordio = mx.recordio.MXRecordIO(results_file, 'r')
        rec.ParseFromString(recordio.read())
        values = list(rec.features["target"].float32_tensor.values)
        shape = list(rec.features["shape"].int32_tensor.values)
        shape = np.squeeze(shape)
        mask = np.reshape(np.array(values), shape)
        mask = np.squeeze(mask, axis=0)
        pred_map = np.argmax(mask, axis=0)

        if show_image:
            utils_obj = utils.Utils()
            utils_obj.show_image(pred_map)

        plt.imshow(pred_map, vmin=0, vmax=settings.HYPER['num_classes'] - 1, cmap='jet')
        plt.savefig(output_filename)
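The same decode path can run in memory rather than writing results.rec to disk and re-reading it with MXRecordIO. The sketch below assumes the read_recordio helper and Record message sketched earlier; decode_semseg_response is a hypothetical name, not part of the original snippet.

import io
import numpy as np
from sagemaker.amazon.record_pb2 import Record  # assumption, as in the sketches above

def decode_semseg_response(body_bytes):
    """Turn a recordio-protobuf response body into a per-pixel class map."""
    rec = Record()
    rec.ParseFromString(next(read_recordio(io.BytesIO(body_bytes))))  # first (and only) record
    values = rec.features["target"].float32_tensor.values
    shape = list(rec.features["shape"].int32_tensor.values)
    mask = np.reshape(np.array(values), shape)          # (1, num_classes, H, W)
    return np.argmax(np.squeeze(mask, axis=0), axis=0)  # per-pixel argmax over classes

# e.g. pred_map = decode_semseg_response(response['Body'].read())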
Example #12
def test_sparse_int_write_spmatrix_to_sparse_tensor():
    n = 4
    array_data = [[1.0, 2.0], [10.0, 30.0], [100.0, 200.0, 300.0, 400.0], [1000.0, 2000.0, 3000.0]]
    keys_data = [[0, 1], [1, 2], [0, 1, 2, 3], [0, 2, 3]]

    flatten_data = list(itertools.chain.from_iterable(array_data))
    y_indices = list(itertools.chain.from_iterable(keys_data))
    x_indices = [[i] * len(keys_data[i]) for i in range(len(keys_data))]
    x_indices = list(itertools.chain.from_iterable(x_indices))

    array = coo_matrix((flatten_data, (x_indices, y_indices)), dtype='int')
    with tempfile.TemporaryFile() as f:
        write_spmatrix_to_sparse_tensor(f, array)
        f.seek(0)
        for record_data, expected_data, expected_keys in zip(read_recordio(f), array_data, keys_data):
            record = Record()
            record.ParseFromString(record_data)
            assert record.features["values"].int32_tensor.values == expected_data
            assert record.features["values"].int32_tensor.keys == expected_keys
            assert record.features["values"].int32_tensor.shape == [n]