Exemplo n.º 1
0
def predict():
    """Flask endpoint: run a prediction for the request's JSON payload.

    Expects a JSON body containing 'user_id' plus the feature columns in
    config.ACCEPTABLE_COLUMNS.  The raw payload and the normalized
    dataframe are persisted under unique filenames for debugging, then
    fed to Prediction.  Returns JSON of the form
    {"results": {"prediction": <int>, "label": <str>}}.
    """
    log = Log()
    log.print(__name__ + '.' + utils.get_function_caller() + ' -> enter')

    # get data
    json_data = request.get_json(force=True)

    # BUG FIX: the original wrote `msg = 'json_data: ', json_data`,
    # which builds a tuple (comma instead of concatenation) and logs
    # the tuple's repr; format the message explicitly instead.
    log.print('json_data: {}'.format(json_data))

    user_id = json_data['user_id']
    random_state = 42

    input_df = pd.json_normalize(json_data)

    # save json_data and input_df for debugging purposes, using unique names
    json_data_unique_filename = config.PATH_TO_DATASET + utils.get_unique_filename(
        'json_data.json')
    input_df_unique_filename = config.PATH_TO_DATASET + utils.get_unique_filename(
        'input_df.csv')

    with open(json_data_unique_filename, 'w') as outfile:
        json.dump(json_data, outfile)

    # keep only the columns the model accepts, in the configured order
    input_df = input_df[list(config.ACCEPTABLE_COLUMNS)]
    input_df.to_csv(input_df_unique_filename, index=False)

    prediction = Prediction(user_id, input_df_unique_filename, random_state)
    predictions, labels = prediction.predict()

    result = {'prediction': int(predictions[0]), 'label': str(labels[0])}

    # NOTE: the original assigned output = {'result': result} and then
    # immediately overwrote it with result; the dead store is removed.
    output = result

    log.print(__name__ + '.' + utils.get_function_caller() + ' -> exit')

    return jsonify(results=output)
Exemplo n.º 2
0
    def test_get_unique_filename(self, time_):
        """With a pinned timestamp, decoding the generated unique name
        must yield the original filename unchanged."""
        # Setup: freeze the mocked time source to a constant value.
        time_.return_value = 11111122222

        # Execute: build the unique name, then decode it back.
        encoded = get_unique_filename('abc.txt')
        decoded = base64.b64decode(encoded).decode('utf-8')

        # Verify: the round-tripped name matches the input exactly.
        assert_equal(len(decoded), 7)
        assert_equal(decoded, 'abc.txt')
 def test(self,
          model_path,
          sample_path,
          input_noise=None,
          input_labels=None,
          count=0):
     """Restore the model checkpoint, generate sample images, save them
     under a unique filename derived from sample_path, and print where
     the sample was written."""
     sess, last_saved_step = self.restore_model(model_path)
     with sess:
         images = self.generate_images(sess, input_noise, input_labels,
                                       count)
         target = utils.get_unique_filename(sample_path)
         saved_location = utils.save_image(target, images)
         print("A sample is saved in {:s}".format(saved_location))
Exemplo n.º 4
0
def upload():
    """Flask endpoint: accept an uploaded file and return its unique name.

    Saves the upload to a persistent temporary file (auto-deletion
    disabled so it survives the request) and responds with JSON
    containing a unique filename derived from the temp path.
    Missing or unsupported files are delegated to file_not_supported().
    """
    # BUG FIX: the original called file_not_supported() without
    # returning, so execution fell through to request.files['file'] and
    # raised KeyError when the file part was missing.  Returning its
    # result is safe whether it aborts or builds an error response.
    if 'file' not in request.files or not request.files['file']:
        return file_not_supported()
    file = request.files['file']
    if file and allowed_file(file.filename):
        # disable temp file auto deletion so the saved upload persists
        with tempfile.NamedTemporaryFile(delete=False) as t:
            file.save(t)
            return jsonify({
                'success': True,
                'file': get_unique_filename(t.name)
            })
    # unsupported extension (or falsy file object)
    return file_not_supported()
Exemplo n.º 5
0
def upload():
    """Flask endpoint: validate uploaded files with epubcheck.

    Each allowed file in the configured upload field is saved under a
    unique name, checked, and the collected results are stored in the
    session (as JSON) before redirecting to the 'validate' view.
    """
    saved = []
    for upload_item in request.files.getlist(config['upload_field']):
        # skip missing/unnamed parts and disallowed extensions
        if not (upload_item and upload_item.filename):
            continue
        if not is_allowed_file(upload_item.filename,
                               config['allowed_extensions']):
            continue
        original_name, extension = os.path.splitext(upload_item.filename)
        target_path = os.path.join(config['upload_folder'],
                                   get_unique_filename(extension))
        upload_item.save(target_path)
        saved.append((original_name, target_path))

    results = [
        check(original_name, target_path, config['epubcheck_path'],
              stdout_encoding)
        for original_name, target_path in saved
    ]

    session['results'] = json.dumps(results, ensure_ascii=False)
    return redirect(url_for('validate'))
Exemplo n.º 6
0
def handleUpload():
    """Flask endpoint: classify an uploaded X-ray image.

    Saves the file under a unique name in UPLOAD_FOLDER, runs inference,
    uploads the image to S3, and returns the prediction as JSON.
    """
    if 'x_ray_image' not in request.files:
        print('no file found in upload request')
        # BUG FIX: the original passed the literal string
        # "{'message': 'no file in upload request'}" to jsonify, which
        # returned a JSON-encoded *string* instead of a JSON object.
        return jsonify({'message': 'no file in upload request'})
    file = request.files['x_ray_image']

    if file.filename == '':
        return jsonify({'message': 'No file name provided'})

    # replace the basename with a unique one, keeping the extension
    file.filename = get_unique_filename() + '.' + file.filename.split('.')[-1]

    if file and allowed_file_extensions(file.filename):
        # NOTE(review): assumes the UPLOAD_FOLDER env var is set;
        # os.getenv returns None otherwise and the path calls below
        # would raise — confirm deployment configuration.
        upload_folder = os.getenv('UPLOAD_FOLDER')
        if not os.path.exists(upload_folder):
            os.makedirs(upload_folder)
        uploaded_file_path = os.path.join(upload_folder, file.filename)
        file.save(uploaded_file_path)
        prediction, all_predictions = perform_inference(uploaded_file_path)
        s3_url = upload_to_s3(uploaded_file_path,
                              str(prediction) + '__' + str(file.filename))

        # debug banner around the prediction value
        print('*****************')
        print('*****************')
        print('*****************')
        print(prediction)
        print('*****************')
        print('*****************')
        print('*****************')

        return app.response_class(
            response=json.dumps({"prediction": str(prediction),
                                 "file_url": str(s3_url),
                                 "all": all_predictions}),
            status=200,
            mimetype='application/json'
        )
    else:
        # NOTE(review): 500 is preserved from the original, though 400
        # would better describe a client-side validation failure.
        return app.response_class(
            response=json.dumps({"message": "invalid file provided"}),
            status=500,
            mimetype='application/json'
        )
Exemplo n.º 7
0
	# Demo flow (fragment of a larger function: dataset_path, user_id and
	# random_state are defined above this excerpt): if the dataset exists,
	# train a Pipeline for the user, then push one sample row through the
	# same prediction path the API uses.
	if dataset_path.exists():		

		pipeline = Pipeline(user_id=user_id, path_to_dataset=dataset_path, random_state=random_state, test_size=0.2)
		pipeline.train()
	
		# data dummy for prediction demo: take the first dataset row and
		# re-encode it as the json the API would receive (controller will
		# manage this later on)
		json_data = pd.read_csv(dataset_path)[0:1].to_json(orient='records')[1:-1].replace('},{', '} {')
		json_data = json.loads(json_data)
		print('Predict:',json_data)

		# controller will convert json data to dataframe		
		input_df = pd.json_normalize(json_data)

		# save json_data and input_df for debugging purpose, save using unique name
		json_data_unique_filename = config.PATH_TO_DATASET+utils.get_unique_filename('json_data.json')
		input_df_unique_filename = config.PATH_TO_DATASET+utils.get_unique_filename('input_df.csv')		
		
		with open(json_data_unique_filename, 'w') as outfile:
			json.dump(json_data, outfile)

		input_df.to_csv(input_df_unique_filename, index=False)

		
		# run the prediction against the saved dataframe file and compare
		# with the ground-truth target column from the payload
		prediction = Prediction(user_id, input_df_unique_filename, random_state)
		predictions, labels = prediction.predict()

		print('predictions:',predictions)
		print('prediction labels:',labels)	
		print('ground truth:',json_data[config.TARGET_COLUMN])