Пример #1
0
def download_object(url, params=None, object_type=None):
    """Fetch JSON from *url* and deserialize it into *object_type*.

    Any failure — network error, bad JSON, or deserialization error —
    is printed (full traceback) and reported as a None return value.
    """
    try:
        response = requests.get(url=url, params=params)
        return utils.deserialize_json(object_type, response.json())
    except Exception:
        traceback.print_exc()
        return None
Пример #2
0
def run():
    '''
    Model inference loop.

    Continuously dequeues jobs from the Redis 'job_queue' (FIFO), runs
    the XGBoost model on any provided feature vector, and publishes the
    result back into Redis under the job id with a 300 s expiry so the
    API side can collect it.
    '''
    while True:
        try:
            # Retrieve the oldest job from Redis, FIFO.
            job = redis.dequeue('job_queue')
            job_obj = deserialize_json(job)
            job_id = job_obj['job_id']
            results = {}

            if 'data' in job_obj:
                # Model prediction on the job's feature vector.
                data = job_obj['data']
                X = np.array([data])
                pred = int(xgb_model.predict(X)[0])
                results = {'predictions': {'prediction': pred, 'name': label_names[pred]}}
                print("Data {} Results {}".format(data, results))

            # Push the (possibly empty) result to Redis; the API polls by job id.
            redis.set(job_id, serialize_obj(results), expiry=300)

        except Exception as e:
            # Top-of-loop boundary: log and keep the worker alive.
            # Fixed typo in the message ("occured" -> "occurred").
            print("Unknown error occurred. {}".format(e))
            continue
Пример #3
0
async def get_group_by_id(request):
    """Return a single group looked up by the `group_id` path parameter.

    Responds with 404 when the record does not exist.
    """
    async with request.app['db_pool'].acquire() as conn:
        # Path parameters arrive as strings; the DB layer wants an int.
        group_id = int(request.match_info['group_id'])
        try:
            group = await db.get_group_by_id(conn, group_id)
        except db.RecordNotFound as exc:
            raise web.HTTPNotFound(text=str(exc))

    return web.json_response(deserialize_json(group))
Пример #4
0
async def create_group(request):
    """Create a new group from an uploaded employee file.

    Parses employees out of the posted 'file' payload, derives the new
    group from the most recently stored one, persists it, and echoes
    the parsed employees back as JSON.
    """
    payload = await request.json()
    employees = parse_file(payload['file'])
    async with request.app['db_pool'].acquire() as conn:
        last_group = deserialize_json(await db.get_last_group(conn))
        new_group = create_new_group(*last_group, employees)
        await db.create_data(conn, new_group)
    return web.json_response(employees)
def predict():
    '''
    Handler for the /api/predict route (docstring previously mislabeled
    this as a "Decorator function").

    Takes in a JSON request with 'data' as key to an array of four floats:
    petal length, petal width, sepal length, sepal width.
    Creates a job, enqueues it into Redis, and polls for the finished
    result up to MAX_TRIES times.

    Input: {'data': [float, float, float, float]}
    Returns: {'prediction': int, 'name': str}, int
    '''
    # Default payload returned on every failure path.
    results = {'prediction': -1, 'name': 'nil'}

    # Random UUID identifies this job across the Redis round trip.
    job_id = generate_uuid_string()

    try:
        # Retrieve data from the request JSON.
        data = request.json['data']

        # Enqueue the job for the inference worker.
        job = serialize_obj({'job_id': job_id, 'data': data})
        redis.enqueue('job_queue', job)

        # Poll Redis for the finished job, up to MAX_TRIES attempts.
        response = None
        for _ in range(MAX_TRIES):
            done_job = redis.get(job_id)
            if done_job is not None:
                redis.delete(job_id)
                response = deserialize_json(done_job)
                break
            time.sleep(SLEEP_DURATION)

        if response is None:
            # Worker never produced a result in time: server error.
            return jsonify(results), 500

        if response:
            # Prediction server success: unpack the prediction payload.
            results = response['predictions']
            results = {'prediction': int(results['prediction']), 'name': results['name']}

        return jsonify(results), 200

    except Exception as e:
        # Route boundary: report the default payload with a server error.
        print("Exception occurred {}".format(e))
        return jsonify(results), 500
Пример #6
0
async def get_all_groups(request):
    """Return every stored group as a JSON response."""
    async with request.app['db_pool'].acquire() as conn:
        rows = await db.get_all_groups(conn)
        groups = deserialize_json(rows)
    return web.json_response(groups)