def testClassify(self):
        """Test PredictionService.Classify implementation."""
        model_path = self._GetSavedModelBundlePath()

        atexit.register(self.TerminateProcs)
        model_server_address = self.RunServer(PickUnusedPort(), 'default',
                                              model_path)

        print('Sending Classify request...')
        # Prepare request
        request = classification_pb2.ClassificationRequest()
        request.model_spec.name = 'default'
        request.model_spec.signature_name = 'classify_x_to_y'

        example = request.input.example_list.examples.add()
        example.features.feature['x'].float_list.value.extend([2.0])

        # Send request
        host, port = model_server_address.split(':')
        channel = implementations.insecure_channel(host, int(port))
        stub = prediction_service_pb2.beta_create_PredictionService_stub(
            channel)
        result = stub.Classify(request, RPC_TIMEOUT)  # 5 secs timeout
        # Verify response
        self.assertEqual(1, len(result.result.classifications))
        self.assertEqual(1, len(result.result.classifications[0].classes))
        expected_output = 3.0
        self.assertEqual(expected_output,
                         result.result.classifications[0].classes[0].score)
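Example #2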
    def testClassify(self):
        """Test PredictionService.Classify implementation."""
        model_path = self._GetSavedModelBundlePath()
        model_server_address = TensorflowModelServerTest.RunServer(
            'default', model_path)[1]

        print('Sending Classify request...')
        # Prepare request
        request = classification_pb2.ClassificationRequest()
        request.model_spec.name = 'default'
        request.model_spec.signature_name = 'classify_x_to_y'

        example = request.input.example_list.examples.add()
        example.features.feature['x'].float_list.value.extend([2.0])

        # Send request
        result = self._MakeStub(model_server_address).Classify(
            request, RPC_TIMEOUT)

        # Verify response
        self.assertEqual(1, len(result.result.classifications))
        self.assertEqual(1, len(result.result.classifications[0].classes))
        expected_output = 3.0
        self.assertEqual(expected_output,
                         result.result.classifications[0].classes[0].score)
        self._VerifyModelSpec(result.model_spec, request.model_spec.name,
                              request.model_spec.signature_name,
                              self._GetModelVersion(model_path))
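The two tests above differ mainly in how the stub is created: the first uses the deprecated grpc.beta API (implementations.insecure_channel plus beta_create_PredictionService_stub), the second a _MakeStub helper built on plain grpc. A minimal sketch of such a helper, assuming the grpc and tensorflow-serving-api packages (the helper name comes from the test above; its body here is an assumption):

import grpc
from tensorflow_serving.apis import prediction_service_pb2_grpc

def _MakeStub(address):
    # address is a 'host:port' string; insecure channel, as in the tests above.
    channel = grpc.insecure_channel(address)
    return prediction_service_pb2_grpc.PredictionServiceStub(channel)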
Example #3
def Request(text):
    example = inputs.BuildTextExample(text_utils.TokenizeText(text))
    request = classification_pb2.ClassificationRequest()
    request.model_spec.name = 'default'
    request.model_spec.signature_name = FLAGS.signature_def
    request.input.example_list.examples.extend([example])
    return request
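inputs.BuildTextExample is not shown here. A hypothetical equivalent, assuming it packs a token list into a tf.train.Example (the 'text' feature key is a guess, not taken from the source):

import tensorflow as tf

def BuildTextExample(tokens):
    # Store the tokens as a bytes feature on a tf.train.Example.
    example = tf.train.Example()
    example.features.feature['text'].bytes_list.value.extend(
        token.encode('utf-8') for token in tokens)
    return example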
Example #4
 def _BuildClassificationRequests(self, signature_name: Text):
     for example in self.examples:
         request = classification_pb2.ClassificationRequest()
         request.model_spec.name = self._model_name
         request.model_spec.signature_name = signature_name
         request.input.example_list.examples.append(example)
         yield request
Example #5
def main(_):
  host, port = FLAGS.server.split(':')
  channel = implementations.insecure_channel(host, int(port))
  stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)
  # Send request

  # See prediction_service.proto for gRPC request/response details.

  sample_image1 = iR2G.img2gs_res(test_image, 28, 28)

  new_samples = np.array([sample_image1], dtype=np.float32)
  
  image = new_samples.reshape(784,)

  print(image.shape)

  #f = open(test_image, 'rb')
  #data = f.read()

  # Feature_dict
  request = classification_pb2.ClassificationRequest()
  request.model_spec.name = 'default'
  request.model_spec.signature_name = 'serving_default'
  example = request.input.example_list.examples.add()
  example.features.feature['x'].float_list.value.extend(image.astype(float))
  result = stub.Classify(request, 10.0)  # 10 secs timeout
  print(result)
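iR2G.img2gs_res is an external helper that is not shown. A hypothetical stand-in using Pillow, assuming from the call site that it loads an image, converts it to grayscale, and resizes it to the given width and height:

import numpy as np
from PIL import Image

def img2gs_res(path, width, height):
    # Grayscale-convert and resize; returns a float32 array of shape (height, width).
    img = Image.open(path).convert('L').resize((width, height))
    return np.asarray(img, dtype=np.float32)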
Example #6
def test_pb_request():
    request = classification_pb2.ClassificationRequest()
    request.model_spec.name = "generic_model"
    request.model_spec.signature_name = DEFAULT_SERVING_SIGNATURE_DEF_KEY
    example = request.input.example_list.examples.add()

    data = [4.9, 2.5, 4.5, 1.7]
    example.features.feature[PREDICT_INPUTS].float_list.value.extend(data)

    serialized_output = requests.post("http://localhost:8080/invocations",
                                      data=request.SerializeToString(),
                                      headers={
                                          'Content-type':
                                          'application/octet-stream',
                                          'Accept': 'application/octet-stream'
                                      }).content

    classification_response = classification_pb2.ClassificationResponse()
    classification_response.ParseFromString(serialized_output)

    classifications_classes = classification_response.result.classifications[
        0].classes
    assert len(classifications_classes) == 3
    for c in classifications_classes:
        assert c.score < 1
        assert c.score > 0
Example #7
def classify(body):
    prediction_service_stub = _get_prediction_service_stub()
    request = ParseDict(body, classification.ClassificationRequest())
    result = prediction_service_stub.Classify(request)
    return MessageToDict(result,
                         preserving_proto_field_name=True,
                         including_default_value_fields=True)
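classify above accepts a plain dict and converts it with google.protobuf.json_format.ParseDict. A sketch of a body it would accept, assuming the JSON field names of ClassificationRequest (the values are illustrative):

body = {
    'model_spec': {'name': 'default', 'signature_name': 'serving_default'},
    'input': {
        'example_list': {
            'examples': [
                {'features': {'feature': {
                    'x': {'float_list': {'value': [2.0]}}
                }}}
            ]
        }
    }
}
result = classify(body)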
Example #8
def call_servo(examples, serving_bundle):
  """Send an RPC request to the Servomatic prediction service.

  Args:
    examples: A list of tf.train.Examples that matches the model spec.
    serving_bundle: A `ServingBundle` object that contains the information to
      make the serving request.

  Returns:
    A ClassificationResponse or RegressionResponse proto.
  """
  parsed_url = urlparse('http://' + serving_bundle.inference_address)
  channel = implementations.insecure_channel(parsed_url.hostname,
                                             parsed_url.port)
  stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

  if serving_bundle.model_type == 'classification':
    request = classification_pb2.ClassificationRequest()
  else:
    request = regression_pb2.RegressionRequest()
  request.model_spec.name = serving_bundle.model_name
  if serving_bundle.model_version is not None:
    request.model_spec.version.value = serving_bundle.model_version
  if serving_bundle.signature is not None:
    request.model_spec.signature_name = serving_bundle.signature
  request.input.example_list.examples.extend(examples)

  if serving_bundle.model_type == 'classification':
    return stub.Classify(request, 30.0)  # 30 secs timeout
  else:
    return stub.Regress(request, 30.0)  # 30 secs timeout
Example #9
def main():
    channel = implementations.insecure_channel(
        '10.211.44.8', 8500)  #the ip and port of your server host
    stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

    # the test samples
    examples = []
    f = open('adult.test', 'r')
    for line in f:
        line = line.strip('\n').strip('.').split(',')
        example = get_input(line)
        examples.append(example)

    request = classification_pb2.ClassificationRequest()
    request.model_spec.name = 'dnn'  # the model name you set in the docker container
    request.input.example_list.examples.extend(examples)

    response = stub.Classify(request, 20.0)

    for index in range(len(examples)):
        print(index)
        max_class = max(response.result.classifications[index].classes,
                        key=lambda c: c.score)
        print(max_class.label,
              max_class.score)  # the predicted class and its probability
Example #10
def query(channel, examples):

    # Build the request
    # request = predict_pb2.PredictRequest()
    request = classification_pb2.ClassificationRequest()
    '''
    tensorflow_model_server --port=9000 --model_name=wide_deep --model_base_path=/tmp/pycharm_project_717/
    rec_sys/recall_strategy/wide_deep-hh/wide_deep_model/model/wide_deep_64_40_2_1556356457

    signature_def['serving_default']:
      The given SavedModel SignatureDef contains the following input(s):
        inputs['inputs'] tensor_info:
            dtype: DT_STRING
            shape: (-1)
            name: input_example_tensor:0
      The given SavedModel SignatureDef contains the following output(s):
        outputs['classes'] tensor_info:
            dtype: DT_STRING
            shape: (-1, 2)
            name: head/Tile:0
        outputs['scores'] tensor_info:
            dtype: DT_FLOAT
            shape: (-1, 2)
            name: head/predictions/probabilities:0
      Method name is: tensorflow/serving/classify

    '''
    request.model_spec.name = "wide_deep"  # the model name used above when starting TF serving
    request.model_spec.signature_name = "serving_default"  # get this from Saved Model CLI output
    stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

    request.input.example_list.examples.extend(examples)
    response = stub.Classify(
        request, 10.0)  # 10-second timeout; responses typically come back quickly
    print(dir(response))
    predictions = list(response.result.classifications)
    '''
    Sample output for a single Example:
    classes {
        label: "0"
        score: 0.46758782863616943
    }
    classes {
        label: "1"
        score: 0.5324121713638306
    }
    # predictions[0].classes
    [label: "0"
        score: 0.7774916887283325
    , label: "1"
        score: 0.22250832617282867
    ]
    '''
    prob = [
        round(predictions[i].classes[1].score, 4)
        for i in range(len(predictions))
    ]
    return prob
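Example #11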
    def testIssueRequests_RaiseRpcErrorIfRpcFailed(self):
        # Prepare client and a side effect.
        request = classification_pb2.ClassificationRequest()
        client = TensorFlowServingClient('localhost:1234', 'a_model_name')
        self.prediction_stub.Classify.side_effect = grpc.RpcError

        # Call.
        with self.assertRaises(grpc.RpcError):
            client.IssueRequests([request])
Example #12
def Request(text, ngrams):
    text = text_utils.TokenizeText(text)
    if ngrams is not None:
        ngrams_list = text_utils.ParseNgramsOpts(ngrams)
        ngrams = text_utils.GenerateNgrams(text, ngrams_list)
    example = inputs.BuildTextExample(text, ngrams=ngrams)
    request = classification_pb2.ClassificationRequest()
    request.model_spec.name = 'default'
    request.model_spec.signature_name = 'proba'
    request.input.example_list.examples.extend([example])
    return request
Example #13
def main():
    parser = argparse.ArgumentParser(description='CTR model gRPC client')

    parser.add_argument(
        'tf_server',
        type=str,
        help='host:port for CTR Model TensorFlow Server')

    parser.add_argument(
        'chopstick_length',
        type=float,
        help='chopstick length to classify')

    parser.add_argument(
        '--model-name',
        type=str,
        default='tf_model',
        dest='model_name',
        help='model name to use')

    parser.add_argument(
        '--verbose',
        '-v',
        action='store_true',
        default=False,
        dest='verbose',
        help='verbose output')

    args = parser.parse_args()

    host, port = args.tf_server.split(':')
    channel = implementations.insecure_channel(host, int(port))
    stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)
    
    # We use the predefined ClassificationRequest protobuf here. This API is
    # useful when you are working with the tf.estimator package.
    # tf.estimator.export.ServingInputReceiver expects to receive a tf.Example
    # serialized into a string. All serialization and deserialization is
    # handled for us by gRPC and the ClassificationRequest/ServingInputReceiver
    # API.
    #
    # Consider using the tf.make_tensor_proto function and
    # tf.saved_model.builder.SavedModelBuilder if you are not using the
    # tf.estimator API.
    request = classification_pb2.ClassificationRequest()
    request.model_spec.name = args.model_name
    example = request.input.example_list.examples.add()
    example.features.feature['Chopstick.Length'].float_list.value\
                                .append(args.chopstick_length)

    result = stub.Classify(request, 10.0)  # 10 secs timeout 

    print(result)
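Example #14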
    def parse_request(self, serialized_data):
        request_fn_map = {
            PREDICT: lambda: predict_pb2.PredictRequest(),
            INFERENCE: lambda: inference_pb2.MultiInferenceRequest(),
            CLASSIFY: lambda: classification_pb2.ClassificationRequest(),
            REGRESSION: lambda: regression_pb2.RegressionRequest()
        }

        request = request_fn_map[self.prediction_type]()
        request.ParseFromString(serialized_data)

        return request
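parse_request assumes the caller already knows which prediction type the bytes encode; it is the receiving half of the SerializeToString/ParseFromString round trip used in Example #6. A minimal sketch:

from tensorflow_serving.apis import classification_pb2

request = classification_pb2.ClassificationRequest()
request.model_spec.name = 'generic_model'
serialized = request.SerializeToString()  # bytes on the wire

# Server side, with self.prediction_type == CLASSIFY:
parsed = classification_pb2.ClassificationRequest()
parsed.ParseFromString(serialized)
assert parsed.model_spec.name == 'generic_model'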
Example #15
def main(_):
    host, port = FLAGS.server.split(':')
    channel = implementations.insecure_channel(host, int(port))
    stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

    request = classification_pb2.ClassificationRequest()

    example = request.input.example_list.examples.add()
    example.features.feature['petal_length'].float_list.value.extend([0.8])
    example.features.feature['petal_width'].float_list.value.extend([0.8])

    result = stub.Classify(request, 10.0)  # 10 secs timeout
    print(result)
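Example #16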
    def _create_classification_request(self, data):
        if isinstance(data, classification_pb2.ClassificationRequest):
            return data

        request = classification_pb2.ClassificationRequest()
        request.model_spec.name = self.model_name
        request.model_spec.signature_name = self.signature_name

        feature_dict_list = self._create_feature_dict_list(data)

        examples = [_create_tf_example(feature_dict) for feature_dict in feature_dict_list]

        request.input.example_list.examples.extend(examples)

        return request
Example #17
    def _ExampleToClassificationRequest(
            self, example: tf.train.Example
    ) -> classification_pb2.ClassificationRequest:
        """Convert single Example to ClassificationRequest.

    Args:
      example: `Example` instance to convert.

    Returns:
      A converted `ClassificationRequest` instance.
    """
        request = classification_pb2.ClassificationRequest()
        request.model_spec.name = self._model_name
        request.model_spec.signature_name = self._signature_name
        request.input.example_list.examples.append(example)
        return request
Example #18
def main(_):

    host, port = FLAGS.server.split(':')
    channel = implementations.insecure_channel(host, int(port))
    stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)
    # Send request

    # See prediction_service.proto for gRPC request/response details.

    # Loop through the images in the folder

    dir_path = path
    # Read the directories available
    list_files = os.listdir(path)
    labels = []
    images = []
    f = open("answer.csv", "w+")
    # Label each folder with a value starting
    # from zero
    for i in list_files:
        print i + " loading ..."
        print i.split(".")
        f.write(i + "\n")

    sample_image1 = iR2G.img2gs_res(test_image, 28, 28)

    new_samples = np.array([sample_image1], dtype=np.float32)

    image = new_samples.reshape(784, )

    print(image.shape)

    # Feature_dict
    request = classification_pb2.ClassificationRequest()
    request.model_spec.name = 'cat_and_dog'
    request.model_spec.signature_name = 'serving_default'
    example = request.input.example_list.examples.add()
    example.features.feature['x'].float_list.value.extend(image.astype(float))
    result = stub.Classify(request, 10.0)  # 10 secs timeout

    label0_value = result.result.classifications[0].classes[0].score
    label1_value = result.result.classifications[0].classes[1].score

    if label0_value > label1_value:
        print(0)
    else:
        print(1)
Example #19
def call_servo(examples, serving_bundle):
  """Send an RPC request to the Servomatic prediction service.

  Args:
    examples: A list of examples that matches the model spec.
    serving_bundle: A `ServingBundle` object that contains the information to
      make the serving request.

  Returns:
    A ClassificationResponse or RegressionResponse proto.
  """
  parsed_url = urlparse('http://' + serving_bundle.inference_address)
  channel = implementations.insecure_channel(parsed_url.hostname,
                                             parsed_url.port)
  stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

  if serving_bundle.use_predict:
    request = predict_pb2.PredictRequest()
  elif serving_bundle.model_type == 'classification':
    request = classification_pb2.ClassificationRequest()
  else:
    request = regression_pb2.RegressionRequest()
  request.model_spec.name = serving_bundle.model_name
  if serving_bundle.model_version is not None:
    request.model_spec.version.value = serving_bundle.model_version
  if serving_bundle.signature is not None:
    request.model_spec.signature_name = serving_bundle.signature

  if serving_bundle.use_predict:
    # tf.compat.v1 API used here to convert tf.example into proto. This
    # utility file is bundled in the witwidget pip package which has a dep
    # on TensorFlow.
    request.inputs[serving_bundle.predict_input_tensor].CopyFrom(
      tf.compat.v1.make_tensor_proto(
        values=[ex.SerializeToString() for ex in examples],
        dtype=types_pb2.DT_STRING))
  else:
    request.input.example_list.examples.extend(examples)

  if serving_bundle.use_predict:
    return common_utils.convert_predict_response(
      stub.Predict(request, 30.0), serving_bundle) # 30 secs timeout
  elif serving_bundle.model_type == 'classification':
    return stub.Classify(request, 30.0)  # 30 secs timeout
  else:
    return stub.Regress(request, 30.0)  # 30 secs timeout
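call_servo only reads attributes off serving_bundle, so any object exposing them works. A usage sketch with a hypothetical stand-in (the real ServingBundle class has its own constructor; this namedtuple only mirrors the attributes the function above touches):

import collections

Bundle = collections.namedtuple('Bundle', [
    'inference_address', 'model_name', 'model_type', 'model_version',
    'signature', 'use_predict', 'predict_input_tensor'])

bundle = Bundle('localhost:8500', 'wdl', 'classification', None, None,
                False, None)
response = call_servo(examples, bundle)  # a ClassificationResponse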
Example #20
def prepare_classify_requests(instances, model_name, model_version):
    request = classification_pb2.ClassificationRequest()
    request.model_spec.name = model_name

    if model_version is not None:
        request.model_spec.version.value = model_version

    instance_examples = []
    for instance in instances:
        feature_dict = {}
        for key, value in instance.items():
            if not isinstance(value, list):
                value = [value]
            feature_dict[key] = from_data_to_feature(np.array(value).ravel())
        instance_examples.append(tf.train.Example(features=tf.train.Features(feature=feature_dict)))

    request.input.CopyFrom(input_pb2.Input(example_list=input_pb2.ExampleList(examples=instance_examples)))
    return request
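from_data_to_feature is not shown above. A hypothetical version, assuming it maps a 1-D numpy array to the matching tf.train.Feature kind by dtype:

import numpy as np
import tensorflow as tf

def from_data_to_feature(values):
    # Integers -> Int64List, floats -> FloatList, everything else -> BytesList.
    if np.issubdtype(values.dtype, np.integer):
        return tf.train.Feature(int64_list=tf.train.Int64List(value=values.tolist()))
    if np.issubdtype(values.dtype, np.floating):
        return tf.train.Feature(float_list=tf.train.FloatList(value=values.tolist()))
    return tf.train.Feature(bytes_list=tf.train.BytesList(
        value=[str(v).encode('utf-8') for v in values]))

Example #21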
def run():
    token = 'Bearer eyJhbGciOiJSUzI1NiIsImtpZCI6ImxlZ2FjeS10b2tlbi1rZXkiLCJ0eXAiOiJKV1QifQ.eyJqdGkiOiI4Yzk3YWZjM2I4Mzk0N2E5OWVhMjMyNzE3Y2U3ZTFjMyIsInN1YiI6Im1scHRlc3RjbGllbnQiLCJhdXRob3JpdGllcyI6WyJtbHB0ZXN0Y2xpZW50Il0sInNjb3BlIjpbIm1scHRlc3RjbGllbnQiXSwiY2xpZW50X2lkIjoibWxwdGVzdGNsaWVudCIsImNpZCI6Im1scHRlc3RjbGllbnQiLCJhenAiOiJtbHB0ZXN0Y2xpZW50IiwiZ3JhbnRfdHlwZSI6ImNsaWVudF9jcmVkZW50aWFscyIsInJldl9zaWciOiJiNThjNWQ0ZiIsImlhdCI6MTQ4NjYzNDIzMSwiZXhwIjozNjMxMDgyMjMxLCJpc3MiOiJodHRwOi8vbG9jYWxob3N0OjgwODAvdWFhL29hdXRoL3Rva2VuIiwiemlkIjoidWFhIiwiYXVkIjpbIm1scHRlc3RjbGllbnQiXX0.nIl71Dxktizfb5B870Mlh_-62kN9_Wlda8WYbiz3iFaj22LzIUkQiRIAI57g3IwPXbJnJ1tlrf5_DIJpycRxzfxIZnW_GJW56sgY5L4mdPVHSIUHjeFh5v5tGwmOG6a1mYH_H0y8G-nHNolfSejcyvc4RYvcba4kS2nm-wDKKgfqDVaspM4Ktsa15eLHYn1P0LIUEsewTDm3qL_PgbJC3WKq_qgk02B5Or1n0doLkGBtccYlQEZ9lRixmkdov7_4Nl9UNTPgaYchC0AEaxd_RRCBK78FwC6tw3v1X3xJFXoYdJlMNOnTGdbQ4CVP5-Jd7gifPnUilPPPoJmITg0HZQ'
    metadata = [('authorization', token)]
    channel = grpc.insecure_channel("127.0.0.1:9000")
    stub = tensorflow__serving_dot_apis_dot_prediction_service__pb2.PredictionServiceStub(
        channel)

    request = tensorflow__serving_dot_apis_dot_classification__pb2.ClassificationRequest(
    )
    request.model_spec.name = "func.pkl"
    request.model_spec.signature_name = "classify_x_to_y"

    example = request.input.example_list.examples.add()
    example.features.feature["sepal length (cm)"].float_list.value.extend(
        [5.1, 7.0, 6.3, 4.6])
    example.features.feature["sepal width (cm)"].float_list.value.extend(
        [3.5, 3.2, 3.3, 3.1])
    example.features.feature["petal length (cm)"].float_list.value.extend(
        [1.4, 4.7, 6.0, 1.5])
    example.features.feature["petal width (cm)"].float_list.value.extend(
        [0.2, 1.4, 2.5, 0.2])

    response = stub.Classify(request, metadata=metadata)
    print(response)
Example #22
def run(host, port, input_str, model, signature_name):

    channel = implementations.insecure_channel(host, port)
    stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

    # Pre-processing input
    prediction_input = [json.dumps(eval(input_str))]
    ink, classname = creat.parse_line(prediction_input[0])

    # encapsulate as tf.Example object
    classnames = ['doodle', 'expression', 'symbols']
    features = {}
    features["class_index"] = tf.train.Feature(int64_list=tf.train.Int64List(
        value=[classnames.index("doodle")]))
    features["ink"] = tf.train.Feature(float_list=tf.train.FloatList(
        value=ink.flatten()))
    features["shape"] = tf.train.Feature(int64_list=tf.train.Int64List(
        value=ink.shape))
    f = tf.train.Features(feature=features)
    example = tf.train.Example(features=f)
    final_req = [example]
    start = time.time()

    # Generate the request
    request = classification_pb2.ClassificationRequest()
    request.model_spec.name = model
    request.model_spec.signature_name = signature_name
    request.input.example_list.examples.extend(final_req)

    result = stub.Classify(request, 10.0)

    end = time.time()
    time_diff = end - start

    print(result)
    print('time elapsed: {}'.format(time_diff))
Example #23
def wdl_sort_service(reco_set, temp, hbu):
    """
    Rank candidates with the wide & deep model
    :return:
    """

    # 1. Read the user's features from the user feature store
    try:
        user_feature = eval(
            hbu.get_table_row('ctr_feature_user',
                              '{}'.format(temp.user_id).encode(),
                              'channel:{}'.format(temp.channel_id).encode()))
        logger.info(
            "{} INFO get user user_id:{} channel:{} profile data".format(
                datetime.now().strftime('%Y-%m-%d %H:%M:%S'), temp.user_id,
                temp.channel_id))

    except Exception as e:
        user_feature = []

    if user_feature:

        # 2. Read the article features from the article feature store
        examples = []
        for article_id in reco_set:
            try:
                article_feature = eval(
                    hbu.get_table_row(
                        'ctr_feature_article',
                        '{}'.format(article_id).encode(),
                        'article:{}'.format(article_id).encode()))
            except Exception as e:
                article_feature = [0.0] * 111

            # 3. Build one Example per article for serving; feature order and names must match the training samples
            channel_id = int(article_feature[0])
            vector = np.mean(article_feature[11:])
            user_weights = np.mean(user_feature)
            article_weights = np.mean(article_feature[1:11])

            # Assemble the Example
            example = tf.train.Example(features=tf.train.Features(
                feature={
                    'channel_id':
                    tf.train.Feature(int64_list=tf.train.Int64List(
                        value=[channel_id])),
                    'vector':
                    tf.train.Feature(float_list=tf.train.FloatList(
                        value=[vector])),
                    'user_weights':
                    tf.train.Feature(float_list=tf.train.FloatList(
                        value=[user_weights])),
                    'article_weights':
                    tf.train.Feature(float_list=tf.train.FloatList(
                        value=[article_weights]))
                }))

            examples.append(example)

        # Call the model service
        with grpc.insecure_channel("0.0.0.0:8500") as channel:
            stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

            # Wrap the prepared Examples in a request and
            # name the model to call
            request = classification_pb2.ClassificationRequest()
            request.model_spec.name = 'wdl'
            request.input.example_list.examples.extend(examples)

            # Fetch the result
            response = stub.Classify(request, 10.0)

            # Join the scores with the article_ids, sort by CTR, and recommend
            num = len(reco_set)
            scores = []

            for i in range(num):

                # Pick the class whose label sorts last ('1', the positive
                # class) and keep its score.
                label_1 = max(response.result.classifications[i].classes,
                              key=lambda c: c.label)

                scores.append(label_1.score)

            res = sorted(list(zip(reco_set, scores)),
                         key=lambda c: c[1],
                         reverse=True)

            article_list = [index[0] for index in res]

            # Return at most the top 100 article ids
            if len(article_list) > 100:
                article_list = article_list[:100]

            reco_set = list(map(int, article_list))

        return reco_set
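The max(..., key=lambda c: c.label) trick above relies on the label strings: with labels '0' and '1', the class whose label sorts last is the positive class. Given the response layout shown in Example #10, an equivalent but more explicit lookup, as a sketch:

def positive_score(classification):
    # classification is one entry of response.result.classifications.
    for cls in classification.classes:
        if cls.label == '1':
            return cls.score
    return None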
Example #24
def wdl_sort_service(reco_set, temp, hbu):
    #def wdl_sort_service():
    """
    Rank candidates with the wide & deep model
    :param reco_set:
    :param temp:
    :param hbu:
    :return:
    """
    #hbu = HBaseUtils(pool)
    # Ranking
    # 1. Read the user's features from the user feature store
    try:
        user_feature = eval(
            hbu.get_table_row(
                'ctr_feature_user',
                #'{}'.format(1105093883106164736).encode(),
                #'channel:{}'.format(18).encode()))
                '{}'.format(temp.user_id).encode(),
                'channel:{}'.format(temp.channel_id).encode()))
        # logger.info("{} INFO get user user_id:{} channel:{} profile data".format(
        #     datetime.now().strftime('%Y-%m-%d %H:%M:%S'), temp.user_id, temp.channel_id))
        print(user_feature)
    except Exception as e:
        user_feature = []
    if user_feature:
        # 2. Read the article features from the article feature store
        result = []

        # examples
        examples = []
        #for article_id in [108245, 108246, 108242, 108240]:
        for article_id in reco_set:
            try:
                article_feature = eval(
                    hbu.get_table_row(
                        'ctr_feature_article',
                        '{}'.format(article_id).encode(),
                        'article:{}'.format(article_id).encode()))
            except Exception as e:
                article_feature = [0.0] * 111

            # article_feature layout: [channel, 10 weights, 100-dim vector]

            # Build the per-article/user Example, keeping the training-sample feature order
            channel_id = int(article_feature[0])

            vector = np.mean(article_feature[11:])

            user_weights = np.mean(user_feature)

            article_weights = np.mean(article_feature[1:11])

            # Pack into an Example (one sample at a time)
            example = tf.train.Example(features=tf.train.Features(
                feature={
                    "channel_id":
                    tf.train.Feature(int64_list=tf.train.Int64List(
                        value=[channel_id])),
                    "vector":
                    tf.train.Feature(float_list=tf.train.FloatList(
                        value=[vector])),
                    'user_weights':
                    tf.train.Feature(float_list=tf.train.FloatList(
                        value=[user_weights])),
                    'article_weights':
                    tf.train.Feature(float_list=tf.train.FloatList(
                        value=[article_weights])),
                }))

            examples.append(example)

        # With all the samples collected in one list,
        # call the TensorFlow Serving model service
        with grpc.insecure_channel("127.0.0.1:8500") as channel:
            stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

            # Build the request: the model name and
            # the list of Examples to score
            request = classification_pb2.ClassificationRequest()
            request.model_spec.name = 'wdl'
            request.input.example_list.examples.extend(examples)

            # Send the request
            response = stub.Classify(request, 10.0)
            print(response)
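Example #25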
def transform_view():
    from flask import request
    csv = request.files['file']
    csv.save("Network_Test_Traffic.csv")
    chunksize = 23
    chunk_list = []
    missing_values = ["n/a", "na", "--", "Infinity", "infinity", "Nan", "NaN"]

    for chunk in pd.read_csv("Network_Test_Traffic.csv",
                             chunksize=chunksize,
                             na_values=missing_values):
        chunk_list.append(chunk)
        break
    dataFrme = pd.concat(chunk_list)

    lstcols = []
    for i in dataFrme.columns:
        i = str(i).replace(' ', '').replace('/', '')
        lstcols.append(i)
    dataFrme.columns = lstcols
    dataFrme = dataFrme[lstPred_col]
    df_predict_original = dataFrme.copy()
    scaler = MinMaxScaler(feature_range=(0, 1))
    selected_X = pd.DataFrame(scaler.fit_transform(dataFrme),
                              columns=dataFrme.columns,
                              index=dataFrme.index)
    df_predict1 = selected_X.copy()

    max_rows = 2800
    dataframes = []
    dataframes_original = []
    while len(df_predict1) > max_rows:
        top_original = df_predict_original[:max_rows]
        top = df_predict1[:max_rows]
        dataframes.append(top)
        dataframes_original.append(top_original)
        df_predict1 = df_predict1[max_rows:]
        df_predict_original = df_predict_original[max_rows:]
    else:
        dataframes.append(df_predict1)
        dataframes_original.append(df_predict_original)

    final_df = pd.DataFrame(columns=lstPred_col + ['Location', 'Probability'])
    for i, j in zip(dataframes, dataframes_original):
        j.index = pd.RangeIndex(len(i.index))
        examples = []
        for index, row in i.iterrows():
            example = tf.train.Example()
            for col, value in row.iteritems():
                example.features.feature[col].float_list.value.append(value)
            examples.append(example)

        channel = grpc.insecure_channel(server)
        stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
        request = classification_pb2.ClassificationRequest()
        request.model_spec.name = 'Model_Network'
        request.model_spec.signature_name = 'serving_default'
        request.input.example_list.examples.extend(examples)
        response = stub.Classify(request, 10.0)

        # Attach the predicted location and its probability to the original
        # (unscaled) rows of this chunk.
        outputs = j.copy()
        for index, row in outputs.iterrows():
            max_class = max(response.result.classifications[index].classes,
                            key=lambda c: c.score)
            outputs.loc[index, 'Location'] = get_key(int(max_class.label))
            outputs.loc[index, 'Probability'] = max_class.score
        print(outputs)

        final_df = final_df.append(outputs, ignore_index=True)

    os.remove("Network_Test_Traffic.csv")
    return render_template('view.html',
                           tables=[final_df.to_html()],
                           titles=['na'])
Example #26
def wdl_sort_service():
    """
    Rank candidates with the wide & deep model
    :param reco_set:
    :param temp:
    :param hbu:
    :return:
    """
    hbu = HBaseUtils(pool)
    # Ranking
    # 1. Read the user's features from the user feature store (1115629498121846784)
    try:
        user_feature = eval(hbu.get_table_row('ctr_feature_user',
                                              '{}'.format(1113244157343694848).encode(),
                                              'channel:{}'.format(18).encode()))
        # logger.info("{} INFO get user user_id:{} channel:{} profile data".format(
        #     datetime.now().strftime('%Y-%m-%d %H:%M:%S'), temp.user_id, temp.channel_id))
    except Exception as e:
        user_feature = []
    if user_feature:
        # 2. Read the article features from the article feature store
        result = []

        # examples
        examples = []
        # for article_id in [17749, 17748, 44371, 44368]:
        for article_id in [22324, 22325, 22326, 22327]:
            try:
                article_feature = eval(hbu.get_table_row('ctr_feature_article',
                                                         '{}'.format(article_id).encode(),
                                                         'article:{}'.format(article_id).encode()))
            except Exception as e:

                article_feature = [0.0] * 111

            channel_id = int(article_feature[0])
            # Average of the trailing 100-dim article vector
            vector = np.mean(article_feature[11:])
            # Third feature: average of the 10-dim user weights
            # (use a new name so user_feature is not clobbered between iterations)
            user_weights = np.mean(user_feature)
            # Fourth feature: average of the 10-dim article weights
            article_weights = np.mean(article_feature[1:11])

            # Assemble the Example
            example = tf.train.Example(features=tf.train.Features(feature={
                "channel_id": tf.train.Feature(int64_list=tf.train.Int64List(value=[channel_id])),
                "vector": tf.train.Feature(float_list=tf.train.FloatList(value=[vector])),
                'user_weights': tf.train.Feature(float_list=tf.train.FloatList(value=[user_weights])),
                'article_weights': tf.train.Feature(float_list=tf.train.FloatList(value=[article_weights])),
            }))

            examples.append(example)

        with grpc.insecure_channel('127.0.0.1:8500') as channel:
            # Open the channel
            stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

            # With the Example instances built above,
            # construct the RPC request and set the model name.
            request = classification_pb2.ClassificationRequest()
            request.model_spec.name = 'wdl'
            request.input.example_list.examples.extend(examples)

            # Send the request and fetch the result
            response = stub.Classify(request, 10.0)
            print(response)

    return None
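Example #27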
def wdl_sort_service():
    """
    Rank candidates with the wide & deep model
    :param reco_set:
    :param temp:
    :param hbu:
    """
    hbu = HBaseUtils(pool)
    # Ranking
    # 1. Read the user's features from the user feature store
    try:
        user_feature = eval(hbu.get_table_row('ctr_feature_user',
                                              '{}'.format(1115629498121846784).encode(),
                                              'channel:{}'.format(18).encode()))
        # logger.info("{} INFO get user user_id:{} channel:{} profile data".format(
        #     datetime.now().strftime('%Y-%m-%d %H:%M:%S'), temp.user_id, temp.channel_id))
    except Exception as e:
        user_feature = []
    if user_feature:
        # 2. Read the article features from the article feature store
        result = []

        # examples
        examples = []
        for article_id in [17749, 17748, 44371, 44368]:
            try:
                article_feature = eval(hbu.get_table_row('ctr_feature_article',
                                                         '{}'.format(article_id).encode(),
                                                         'article:{}'.format(article_id).encode()))
            except Exception as e:
                article_feature = [0.0] * 111

            # article_feature layout: [channel, 10 weights, 100-dim vector]

            # Build the per-article/user Example; order and format must match the training samples in wide_and_deep.py (features are averaged)
            channel_id = int(article_feature[0])

            vector = np.mean(article_feature[11:])  # same as training: take the mean (a simplification)

            user_weights = np.mean(user_feature)  # same as training: take the mean (a simplification)

            article_weights = np.mean(article_feature[1:11])  # same as training: take the mean (a simplification)

            # Pack into an Example (one sample at a time)
            example = tf.train.Example(features=tf.train.Features(feature={
                "channel_id": tf.train.Feature(int64_list=tf.train.Int64List(value=[channel_id])),
                "vector": tf.train.Feature(float_list=tf.train.FloatList(value=[vector])),
                'user_weights': tf.train.Feature(float_list=tf.train.FloatList(value=[user_weights])),
                'article_weights': tf.train.Feature(float_list=tf.train.FloatList(value=[article_weights])),
            }))

            examples.append(example)

        # With all the samples collected in one list,
        # call the TensorFlow Serving model service
        with grpc.insecure_channel("127.0.0.1:8500") as channel:
            stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

            # Build the request
            request = classification_pb2.ClassificationRequest()
            request.model_spec.name = 'wdl'  # model name
            request.input.example_list.examples.extend(examples)  # the Examples to score

            # Send the request and fetch the result
            response = stub.Classify(request, 10.0)
            print(response)

    # This should return the predictions from the response, but it has not been run yet and the response structure is unknown, so it returns None for now
    return None
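Example #23 shows a completed version of the same post-processing. Inside the function above, instead of returning None, one could extract per-article positive-class probabilities from the response (a sketch, reusing the layout shown in Example #10; pairing with these article ids is an assumption):

probs = [max(c.classes, key=lambda c: c.label).score
         for c in response.result.classifications]
return list(zip([17749, 17748, 44371, 44368], probs))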
Example #28
# -*- coding: utf-8 -*-

from tensorflow_serving.apis import classification_pb2
from tensorflow_serving.apis import regression_pb2
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2

from grpc.beta import implementations
'''
pip install tensorflow-serving-api
pip install grpcio
'''

channel = implementations.insecure_channel("0.0.0.0", int(8500))
stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

request = classification_pb2.ClassificationRequest()
example = request.input.example_list.examples.add()
#example.features.feature['SepalLength'].float_list.value.extend([float(100)])

result = stub.Classify(request, 10.0)  # 10 secs timeout

print(result)
Example #29
def transform_view():

  request_file = request.files['data_file']
  request_file.save("iBeacon_RSSI_Unlabeled.csv")
  TF_MODEL_SERVER_HOST = os.getenv("TF_MODEL_SERVER_HOST", "127.0.0.1")
  TF_MODEL_SERVER_PORT = int(os.getenv("TF_MODEL_SERVER_PORT", 9000))
  BLE_RSSI_UL = pd.read_csv('iBeacon_RSSI_Unlabeled.csv')
  COLUMNS = list(BLE_RSSI_UL.columns)
  FEATURES = COLUMNS[2:]
  LABEL = [COLUMNS[0]]


  df_predict = pd.read_csv('iBeacon_RSSI_Unlabeled.csv')
  df_predict = df_predict.drop(['date','location'],axis = 1)
  df_predict_original = df_predict.copy()
  df_predict[FEATURES] = (df_predict[FEATURES] - df_predict[FEATURES].mean())/df_predict[FEATURES].std()

  max_rows = 2800
  dataframes = []
  dataframes_original = []

  while len(df_predict) > max_rows:
      top_original = df_predict_original[:max_rows]
      top = df_predict[:max_rows]
      dataframes.append(top)
      dataframes_original.append(top_original)
      df_predict = df_predict[max_rows:]
      df_predict_original = df_predict_original[max_rows:]
  else:
      dataframes.append(df_predict)
      dataframes_original.append(df_predict_original)

  final_df = pd.DataFrame(columns=['b3001','b3002','b3003','b3004','b3005','b3006','b3007','b3008','b3009','b3010','b3011','b3012','b3013','Location','Probability'])


  for i,j in zip(dataframes,dataframes_original):
      j.index = pd.RangeIndex(len(i.index))

      examples = []
      for index, row in i.iterrows():
          example = tf.train.Example()
          for col, value in row.iteritems():
              example.features.feature[col].float_list.value.append(value)
          examples.append(example)
      channel = implementations.insecure_channel(TF_MODEL_SERVER_HOST, TF_MODEL_SERVER_PORT)
      stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)
      request1 = classification_pb2.ClassificationRequest()
      request1.model_spec.name = "blerssi"
      request1.model_spec.signature_name = "serving_default"
      request1.input.example_list.examples.extend(examples)
      response = stub.Classify(request1, 10.0)
      outputs = j.copy()
      for index, row in outputs.iterrows():
          max_class = max(response.result.classifications[index].classes, key=lambda c: c.score)
          outputs.loc[index, 'Location'] = get_key(int(max_class.label))
          outputs.loc[index, 'Probability'] = max_class.score

      final_df = final_df.append(outputs, ignore_index=True)

  return render_template('view.html', tables=[final_df.to_html()], titles=['na'])
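Example #30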
def transform_view():
    from flask import request
    csv = request.files['file']
    csv.save("iBeacon_RSSI_Unlabeled.csv")
    BLE_RSSI_UL = pd.read_csv("iBeacon_RSSI_Unlabeled.csv",
                              encoding='utf8')  # Unlabeled dataset
    COLUMNS = list(BLE_RSSI_UL.columns)
    FEATURES = COLUMNS[2:]
    LABEL = [COLUMNS[0]]

    # Data Preprocesssing
    df_predict = BLE_RSSI_UL  # Unlabeled dataset
    df_predict = df_predict.drop(['date', 'location'], axis=1)
    df_predict_original = df_predict.copy()
    # df_predict[FEATURES] = (df_predict[FEATURES] - df_predict[FEATURES].mean()) / df_predict[FEATURES].std()
    df_predict[FEATURES] = (df_predict[FEATURES]) / (-200)

    max_rows = 2800
    dataframes = []
    dataframes_original = []
    while len(df_predict) > max_rows:
        top_original = df_predict_original[:max_rows]
        top = df_predict[:max_rows]
        dataframes.append(top)
        dataframes_original.append(top_original)
        df_predict = df_predict[max_rows:]
        df_predict_original = df_predict_original[max_rows:]
    else:
        dataframes.append(df_predict)
        dataframes_original.append(df_predict_original)

    TF_MODEL_SERVER_HOST = os.getenv("TF_MODEL_SERVER_HOST", "127.0.0.1")
    TF_MODEL_SERVER_PORT = int(os.getenv("TF_MODEL_SERVER_PORT", 8500))
    server = str(TF_MODEL_SERVER_HOST) + ":" + str(TF_MODEL_SERVER_PORT)
    final_df = pd.DataFrame(columns=[
        'b3001', 'b3002', 'b3003', 'b3004', 'b3005', 'b3006', 'b3007', 'b3008',
        'b3009', 'b3010', 'b3011', 'b3012', 'b3013', 'Location', 'Probability'
    ])
    for i, j in zip(dataframes, dataframes_original):
        j.index = pd.RangeIndex(len(i.index))
        examples = []
        for index, row in i.iterrows():
            example = tf.train.Example()
            for col, value in row.iteritems():
                example.features.feature[col].float_list.value.append(value)
            examples.append(example)


        channel = grpc.insecure_channel(server)
        stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
        request = classification_pb2.ClassificationRequest()
        request.model_spec.name = 'Model_Blerssi'
        request.model_spec.signature_name = 'serving_default'
        request.input.example_list.examples.extend(examples)
        response = stub.Classify(request, 10.0)

        outputs = j.copy()
        for index, row in outputs.iterrows():
            max_class = max(response.result.classifications[index].classes,
                            key=lambda c: c.score)
            outputs.loc[index, 'Location'] = get_key(int(max_class.label))
            outputs.loc[index, 'Probability'] = max_class.score
        print(outputs)

        final_df = final_df.append(outputs, ignore_index=True)

    os.remove("iBeacon_RSSI_Unlabeled.csv")
    return render_template('view.html',
                           tables=[final_df.to_html()],
                           titles=['na'])