Example 1
# Imports assumed for this snippet (Analytics Zoo on Spark; exact module
# paths may differ between Analytics Zoo and BigDL 2.x releases):
from pyspark.sql.functions import col, udf
from pyspark.sql.types import StringType
from bigdl.nn.layer import Model
from zoo.feature.common import ChainedPreprocessing
from zoo.feature.image import *
from zoo.pipeline.nnframes import NNImageReader
from zoo.orca.learn.bigdl.estimator import Estimator


def inference(image_path, model_path, batch_size, sc):
    # Read the images into a Spark DataFrame with an "image" column,
    # resizing each image to 300x300 on load.
    imageDF = NNImageReader.readImages(image_path,
                                       sc,
                                       resizeH=300,
                                       resizeW=300,
                                       image_codec=1)
    # UDF that extracts the image origin (file path) from the image struct.
    getName = udf(lambda row: row[0], StringType())
    # Preprocessing chain: resize, center-crop to 224x224, subtract
    # per-channel means, then convert to a tensor the model can consume.
    transformer = ChainedPreprocessing([
        RowToImageFeature(),
        ImageResize(256, 256),
        ImageCenterCrop(224, 224),
        ImageChannelNormalize(123.0, 117.0, 104.0),
        ImageMatToTensor(),
        ImageFeatureToTensor()
    ])

    # Load the pre-trained BigDL model and wrap it in an Orca Estimator.
    model = Model.loadModel(model_path)
    est = Estimator.from_bigdl(model=model, feature_preprocessing=transformer)

    # predict() appends a "prediction" column; keep the file name alongside it.
    predictionDF = est.predict(data=imageDF,
                               batch_size=batch_size,
                               feature_cols="image").withColumn(
                                   "name", getName(col("image")))
    return predictionDF
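
A minimal usage sketch (not part of the original snippet) for calling the inference function above; it assumes Analytics Zoo's init_nncontext helper for obtaining a BigDL-ready SparkContext, and both paths below are placeholders.

# Hypothetical driver code, only to illustrate how inference() might be invoked.
from zoo.common.nncontext import init_nncontext

sc = init_nncontext()  # SparkContext with the BigDL engine initialized
predictions = inference("file:///tmp/images",          # placeholder image folder
                        "/tmp/pretrained.model",       # placeholder saved BigDL model
                        batch_size=4,
                        sc=sc)
predictions.select("name", "prediction").show(truncate=False)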
Example 2

# Imports assumed for this snippet (Analytics Zoo on Spark; exact module
# paths may differ between Analytics Zoo and BigDL 2.x releases):
from pyspark.sql.functions import col, udf
from pyspark.sql.types import StringType
from bigdl.nn.layer import Model
from zoo.feature.common import ChainedPreprocessing
from zoo.feature.image import *
from zoo.pipeline.nnframes import NNImageReader, NNClassifierModel


def inference(image_path, model_path, batch_size, sc):
    # Same image loading and preprocessing as in Example 1.
    imageDF = NNImageReader.readImages(image_path,
                                       sc,
                                       resizeH=300,
                                       resizeW=300,
                                       image_codec=1)
    getName = udf(lambda row: row[0], StringType())
    transformer = ChainedPreprocessing([
        RowToImageFeature(),
        ImageResize(256, 256),
        ImageCenterCrop(224, 224),
        ImageChannelNormalize(123.0, 117.0, 104.0),
        ImageMatToTensor(),
        ImageFeatureToTensor()
    ])

    # Here the loaded BigDL model is wrapped in an NNClassifierModel,
    # a Spark ML Transformer, instead of an Orca Estimator.
    model = Model.loadModel(model_path)
    classifier_model = NNClassifierModel(model, transformer)\
        .setFeaturesCol("image").setBatchSize(batch_size)
    predictionDF = classifier_model.transform(imageDF).withColumn(
        "name", getName(col("image")))
    return predictionDF
Example 3
# Method of a model-wrapper class (shown here without its enclosing class);
# it simply delegates to BigDL's Model.loadModel.
def _load(self, path):
    return Model.loadModel(path, bigdl_type=self.bigdl_type)