def predict(self, data):
        # TODO: this evaluates a single example, i.e. a mini-batch of one.
        # Generalize this to the general case (arbitrary batch sizes).

        if len(data.shape) == 1:
            data = data.reshape((1, data.shape[0]))

        classification_response = self.skil.api.multipredict(
            deployment_name=self.deployment.name,
            model_name=self.model_name,
            version_name="default",
            body=skil_client.MultiPredictRequest(
                id=str(uuid.uuid1()),
                needs_pre_processing=False,
                inputs=[
                    skil_client.INDArray(ordering='c',
                                         shape=list(data.shape),
                                         data=data.tolist()[0]),
                    skil_client.INDArray(  # This is the keep_prob placeholder data
                        ordering='c',
                        shape=[1],
                        data=[1.0])
                ]))
        output = classification_response.outputs[0]
        prediction = np.asarray(output.data)
        shape = output.shape
        return prediction.reshape(shape)

    def predict_single(self, data, version='default'):
        """Predict for a single input.

        # Arguments:
            data: `numpy.ndarray` (or list thereof). Input data.
            version: version of the deployed service

        # Returns
            `numpy.ndarray` instance for a single-output model, or a list of `numpy.ndarray` for a multi-output model.
        """
        if isinstance(data, list):
            inputs = [self._indarray(np.expand_dims(x, 0)) for x in data]
        else:
            inputs = [self._indarray(np.expand_dims(data, 0))]

        classification_response = self.skil.api.multipredict(
            deployment_name=self.deployment.name,
            model_name=self.model_name,
            version_name=version,
            body=skil_client.MultiPredictRequest(
                id=str(uuid.uuid1()),
                needs_pre_processing=False,
                inputs=inputs
            )
        )
        output = classification_response.outputs[0]
        return np.asarray(output.data).reshape(output.shape)

    def predict(self, data, version='default'):
        """Predict for given batch of data.

        # Arguments:
            data: `numpy.ndarray` (or list thereof). Batch of input data, or list of batches for multi-input model.
            version: version of the deployed service

        # Returns
            `numpy.ndarray` instance for a single-output model, or a list of `numpy.ndarray` for a multi-output model.
        """
        if isinstance(data, list):
            inputs = [self._indarray(x) for x in data]
        else:
            inputs = [self._indarray(data)]

        classification_response = self.skil.api.multipredict(
            deployment_name=self.deployment.name,
            model_name=self.model_name,
            version_name=version,
            body=skil_client.MultiPredictRequest(
                id=str(uuid.uuid1()),
                needs_pre_processing=False,
                inputs=inputs
            )
        )
        outputs = classification_response.outputs
        outputs = [np.asarray(o.data).reshape(o.shape) for o in outputs]
        if len(outputs) == 1:
            return outputs[0]
        return outputs
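The class methods above rely on a `_indarray` helper that is not shown in this snippet. Below is a minimal sketch of what it presumably does, based on the INDArray construction in the first predict() above, together with a hypothetical usage of the two methods; `service` and the input shapes are placeholders, not taken from the original code.

import numpy as np
import skil_client


def _indarray(np_array):
    # Presumed shape of the missing helper: wrap a numpy array into the
    # skil_client.INDArray payload used by MultiPredictRequest, mirroring the
    # construction in the first predict() above.
    return skil_client.INDArray(
        ordering='c',
        shape=list(np_array.shape),
        data=np_array.ravel().tolist())


# Hypothetical usage; `service` stands for an already-constructed instance of
# the class that defines predict()/predict_single() above.
single_example = np.random.rand(784)      # one flattened input vector
batch = np.random.rand(32, 784)           # a mini-batch of 32 inputs
single_prediction = service.predict_single(single_example)
batch_predictions = service.predict(batch)
print(single_prediction.shape, batch_predictions.shape)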
Example #4
# Imports needed to run this standalone example.
import time
import uuid

import numpy as np
import requests
import skil_client

# Log in to the SKIL server and exchange the credentials for a bearer token.
r = requests.post("http://192.168.1.128:9008/login", json={"userId": "admin", "password": "******"})
token = r.json()['token']

# `configuration` is not defined in the original snippet; the standard
# swagger-generated client setup is assumed here.
configuration = skil_client.Configuration()
configuration.host = "http://192.168.1.128:9008"
configuration.api_key['authorization'] = f'Bearer {token}'
api_instance = skil_client.DefaultApi(skil_client.ApiClient(configuration))


# `convert_indarray` and `image_batch` are assumed to be defined earlier:
# `image_batch` holds 12 preprocessed images, and `convert_indarray` wraps a
# numpy array into a skil_client.INDArray payload.
list_ind_array = [[convert_indarray(np.expand_dims(image_batch[i, :, :, :], axis=0))] for i in range(12)]

batch_results = []
index = 0
for data in list_ind_array:
    print("getting response for batch image ", index)
    body_data = skil_client.MultiPredictRequest(
        id=str(uuid.uuid1()),
        needs_pre_processing=False,
        inputs=data
    )
    
    # Send this image to the deployed model endpoint.
    response = api_instance.multipredict("age", "default", "outputgraphwithsoftmax", body=body_data)
    response = response.to_dict()
    output = response['outputs'][0]
    probabilities = output['data']
    probabilities = np.array(probabilities)
    batch_results.append(probabilities)
    index += 1
    time.sleep(0.5)  # prevent spamming the server

output = batch_results[0]
batch_sz = len(batch_results)
arg_max_each_batch = []
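The snippet ends just as per-image post-processing begins. Below is a minimal continuation sketch, assuming each entry of `batch_results` holds a flat softmax vector as collected above; this aggregation is an assumption, not part of the original example.

# Hedged continuation sketch: reduce each collected probability vector to its
# most likely class index. Assumes the loop above filled `batch_results`.
for probabilities in batch_results:
    arg_max_each_batch.append(int(np.argmax(probabilities)))

print("predicted class per image:", arg_max_each_batch)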