Example #1
from datetime import datetime
import json
import logging

import inference  # project-local module exposing estimate_local() and estimate_cmle()


def estimate(messages, inference_type):
    """Estimate baby weights for a batch of JSON-encoded messages."""

    # Accept a single message as well as a list of messages.
    if not isinstance(messages, list):
        messages = [messages]

    # Parse each JSON message into a dictionary of input features.
    instances = [json.loads(message) for message in messages]

    # Remove the pass-through metadata before calling the model.
    source_ids = [instance.pop('source_id') for instance in instances]
    source_timestamps = [instance.pop('source_timestamp') for instance in instances]

    if inference_type == 'local':
        estimated_weights = inference.estimate_local(instances)
    elif inference_type == 'cmle':
        estimated_weights = inference.estimate_cmle(instances)
    else:
        # Unknown inference type: emit a placeholder per instance.
        estimated_weights = ['NA'] * len(instances)

    # Re-attach the metadata and the estimate to each instance.
    for i, instance in enumerate(instances):
        instance['estimated_weight'] = estimated_weights[i]
        instance['source_id'] = source_ids[i]
        instance['source_timestamp'] = source_timestamps[i]
        instance['predict_timestamp'] = datetime.utcnow().strftime(
            '%Y-%m-%d %H:%M:%S')

    logging.info(instances)

    return instances
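For context, the sketch below shows how this batch function might be called with a single JSON-encoded message. The feature keys are borrowed from Example #4; the source_id and source_timestamp values are illustrative assumptions, and a real message would carry the model's full feature set.

# Hypothetical usage sketch; the message contents are illustrative,
# not taken from the repository.
import json

message = json.dumps({
    'source_id': 'msg-001',
    'source_timestamp': '2019-01-01 00:00:00',
    'plurality': 1.0,
    'gestation_weeks': 39,
    'mother_married': 'True',
    'cigarette_use': 'False',
    'alcohol_use': 'False'
})

results = estimate(message, inference_type='local')
print(results[0]['estimated_weight'])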
import inference  # project-local module exposing estimate_local() and estimate_cmle()


def estimate(instance, inference_type):
    """
    Estimate the baby weight for a single instance.

    If inference_type is 'cmle', the model API deployed on Cloud ML Engine is called;
    otherwise, the local model is called.

    Args:
        instance: dictionary of values representing the input features of the instance
        inference_type: can be 'local' or 'cmle'
    Returns:
        dict - the input instance with the estimated weight added
    """

    # Pop the actual weight, if present, so it is not sent to the model.
    weight_pounds = instance.pop('weight_pounds', 'NA')

    if inference_type == 'local':
        estimated_weights = inference.estimate_local([instance])
    elif inference_type == 'cmle':
        estimated_weights = inference.estimate_cmle([instance])
    else:
        estimated_weights = ['NA']

    # Attach the estimate and restore the actual weight.
    instance['estimated_weight'] = estimated_weights[0]
    instance['weight_pounds'] = weight_pounds

    return instance
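A single-instance call might look like the sketch below; the feature values, including the 7.5-pound actual weight, are illustrative assumptions rather than data from the source.

# Hypothetical single-instance call; the values below are illustrative only.
instance = {
    'weight_pounds': 7.5,        # actual weight, popped before inference
    'plurality': 1.0,
    'gestation_weeks': 39,
    'mother_married': 'True',
    'cigarette_use': 'False',
    'alcohol_use': 'False'
}

result = estimate(instance, inference_type='local')
print(result['estimated_weight'], result['weight_pounds'])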
Example #4
        'plurality': 1.0,
        'gestation_weeks': 39,
        'mother_married': 'True',
        'cigarette_use': 'False',
        'alcohol_use': 'False'
      }
]

print("")
print("Inference Type:{}".format(INFERENCE_TYPE))
print("")

time_start = datetime.utcnow()
print("Inference started at {}".format(time_start.strftime("%H:%M:%S")))
print(".......................................")


# Run the batch estimate 10 times to get a rough timing measurement.
for _ in range(10):
    if INFERENCE_TYPE == 'local':
        output = inference.estimate_local(instances)
    else:
        output = inference.estimate_cmle(instances)
    print(output)

time_end = datetime.utcnow()
print(".......................................")
print("Inference finished at {}".format(time_end.strftime("%H:%M:%S")))
print("")
time_elapsed = time_end - time_start
print("Inference elapsed time: {} seconds".format(time_elapsed.total_seconds()))