def check_status(task_id, start_time, query_id):
    """Report the progress of a background ML Celery task.

    Args:
        task_id: Celery task id previously handed to the client.
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        (flask.Response, int): JSON progress payload and HTTP 200.
    """
    from app.celery_tasks.run_ml_task import ml_task_runner
    task = ml_task_runner.AsyncResult(task_id)
    if task.state == 'PENDING':
        # Job has not been picked up by a worker yet.
        response = {
            'state': task.state,
            'current': 0,
            'total': 100,
            'status': 'Pending...'
        }
    elif task.state != 'FAILURE':
        # task.info is the progress meta dict the task publishes; guard
        # against states (e.g. STARTED with no meta) where it is None or
        # not a mapping, which would otherwise raise an unlogged 500.
        info = task.info if isinstance(task.info, dict) else {}
        response = {
            'state': task.state,
            'current': info.get('current', 0),
            'total': info.get('total', 100),
            'status': info.get('status', '')
        }
        if 'result' in info:
            response['result'] = info['result']
    else:
        # Something went wrong in the background job;
        # task.info holds the exception that was raised.
        response = {
            'state': task.state,
            'current': 100,
            'total': 100,
            'status': str(task.info),
        }
    logSuccess(query_id, start_time)
    return jsonify(response), 200
def storage_insert(parameters, start_time, query_id):
    """Save an uploaded file into the storage folder described by `parameters`.

    Args:
        parameters: dict with at least 'folder' (target directory) and
            'extension'/'entity' strings used in messages.
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        ('', 204) on success; aborts with 400 on a bad body or filename.
    """
    if 'file' not in request.files or not request.files['file'].filename:
        code = 400
        logFail(query_id, start_time, code)
        abort(
            code,
            f"Bad request body. Expected {parameters.get('extension')} file "
            f"with key 'file' and correct filename in request body.")
    # An explicit 'filename' header overrides the multipart filename.
    filename = request.headers.get('filename') \
        or request.files['file'].filename
    if not isinstance(filename, str) or len(filename) > 99:
        code = 400
        logFail(query_id, start_time, code)
        abort(code, "Too long or bad filename.")
    # Security: reject path traversal — the stored name must be a plain
    # basename so the write cannot escape the storage folder.
    if os.path.basename(filename) != filename or filename in ('.', '..'):
        code = 400
        logFail(query_id, start_time, code)
        abort(code, "Too long or bad filename.")
    abs_path_filename = os.path.join(parameters.get('folder'), filename)
    # Overwrite semantics: an existing file with the same name is replaced.
    if os.path.isfile(abs_path_filename):
        os.remove(abs_path_filename)
    request.files['file'].save(abs_path_filename)
    logSuccess(query_id, start_time)
    return '', 204
def result_list(start_time, query_id):
    """Return every stored ML result for the e-mail given in the headers.

    Args:
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        (flask.Response, 200): list of result dicts, or "Empty set".
    """
    try:
        # Fetch the person's results, oldest first.
        rows = db.session.query(Results) \
            .filter_by(personEmail=request.headers.get('email')) \
            .order_by(Results.datetime) \
            .all()
    except HTTPException as ex:
        logFail(query_id, start_time, ex.code)
        raise
    logSuccess(query_id, start_time)
    if not rows:
        return jsonify("Empty set"), 200
    # Serialize each row by pulling the same fixed set of attributes.
    fields = ('model', 'preprocessor', 'resource', 'datetime', 'accuracy',
              'precision', 'recall', 'roc_auc', 'log_loss')
    payload = [{name: getattr(row, name) for name in fields} for row in rows]
    return jsonify(payload), 200
def run_prediction(start_time, query_id):
    """Validate prediction inputs and enqueue the ML Celery task.

    The request JSON must name a 'model', 'preprocessor' and 'resource',
    each of which must exist as a file in its configured storage folder.

    Args:
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        (flask.Response, 202): JSON with the Celery task id; aborts with
        400 on a missing entity name, 404 on a missing file.
    """
    # silent=True: a missing or malformed JSON body becomes {} and is
    # reported as a logged 400 below instead of an unlogged 500.
    request_data = request.get_json(silent=True) or {}
    # (request key, parameter set with 'folder' and 'entity' strings)
    specs = (
        ('model', model_parameters),
        ('preprocessor', prep_parameters),
        ('resource', res_parameters),
    )
    # First pass: every entity must be named in the request (400 otherwise).
    for key, params in specs:
        if not request_data.get(key):
            code = 400
            logFail(query_id, start_time, code)
            abort(code, f"Specify {params.get('entity')} in the request.")
    # Second pass: every named entity must exist on disk (404 otherwise).
    paths = {}
    for key, params in specs:
        path = os.path.join(params.get('folder'), request_data.get(key))
        if not os.path.isfile(path):
            code = 404
            logFail(query_id, start_time, code)
            abort(code, f"No {params.get('entity')} with this filename.")
        paths[key] = path
    parameters = {
        'model': paths['model'],
        'preprocessor': paths['preprocessor'],
        'resource': paths['resource'],
        'personEmail': request.headers.get('email'),
        'dateFrom': request_data.get('dateFrom'),
        'timeFrom': request_data.get('timeFrom'),
        'dateTo': request_data.get('dateTo'),
        'timeTo': request_data.get('timeTo'),
        'ticker': request_data.get('ticker')
    }
    # Hand the work off to Celery; the client polls with the returned id.
    task = ml_task_runner.apply_async(args=[parameters])
    logSuccess(query_id, start_time)
    return jsonify(task_id=task.id), 202
def upload_file(start_time, query_id):
    """Accept a multipart upload and queue it for asynchronous processing.

    Args:
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        (flask.Response, 202): JSON with the Celery task id; aborts with
        400 on a bad body or filename.
    """
    file_part = request.files.get('file')
    if file_part is None or not file_part.filename:
        code = 400
        logFail(query_id, start_time, code)
        abort(
            code,
            "Bad request body. Expected file with key 'file' and correct filename in request body."
        )
    # An explicit 'filename' header takes precedence over the multipart name.
    header_name = request.headers.get('filename')
    filename = header_name if header_name else file_part.filename
    if not isinstance(filename, str) or len(filename) > 99:
        code = 400
        logFail(query_id, start_time, code)
        abort(code, "Too long or bad filename.")
    # Stage the upload under a unique temporary path for the worker to read.
    filepath = f"/tmp/{uuid.uuid4()}"
    file_part.save(filepath)
    task = post_task.apply_async(args=[{
        'filename': filename,
        'filepath': filepath,
        'personEmail': request.headers.get('email')
    }])
    logSuccess(query_id, start_time)
    return jsonify(task_id=task.id), 202
def storage_list(parameters, start_time, query_id):
    """List the filenames stored in the folder described by `parameters`.

    Args:
        parameters: dict with at least a 'folder' key (directory to list).
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        (flask.Response, 200): JSON list of names, or "Empty set".
    """
    # os.walk's first yield is (dirpath, dirnames, filenames); only the
    # top-level filenames are wanted.
    _, _, filenames = next(os.walk(parameters.get('folder')))
    logSuccess(query_id, start_time)
    body = filenames if filenames else "Empty set"
    return jsonify(body), 200
def storage_delete(parameters, filename, start_time, query_id):
    """Delete `filename` from the storage folder described by `parameters`.

    Args:
        parameters: dict with at least 'folder' (directory) and 'entity'
            (human-readable name used in the 404 message).
        filename: name of the file to delete, from the client.
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        ('', 204) on success; aborts with 400 on a bad filename, 404 if
        the file does not exist.
    """
    if not isinstance(filename, str) or len(filename) > 99:
        code = 400
        logFail(query_id, start_time, code)
        abort(code, "Too long or bad filename.")
    # Security: reject path traversal — only a plain basename may be
    # deleted, so '../..'-style names cannot remove files outside the folder.
    if os.path.basename(filename) != filename or filename in ('.', '..'):
        code = 400
        logFail(query_id, start_time, code)
        abort(code, "Too long or bad filename.")
    abs_path_filename = os.path.join(parameters.get('folder'), filename)
    if not os.path.isfile(abs_path_filename):
        code = 404
        logFail(query_id, start_time, code)
        abort(code, f"No {parameters.get('entity')} with this filename.")
    os.remove(abs_path_filename)
    logSuccess(query_id, start_time)
    return '', 204
def change_file(fileid, start_time, query_id):
    """Replace the contents of an existing file record via a Celery task.

    Args:
        fileid: primary key of the Files row to update.
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        ('', 204) on success; aborts with 404 for an unknown fileid and
        400 on a bad body or filename.
    """
    if not Files.query.filter_by(fileid=fileid).all():
        # 404 (was 400): an unknown fileid is a missing resource, matching
        # delete_file and file_info for the identical condition.
        code = 404
        logFail(query_id, start_time, code)
        abort(code, "No file with such fileID in database.")
    if 'file' not in request.files or not request.files['file'].filename:
        code = 400
        logFail(query_id, start_time, code)
        abort(
            code,
            "Bad request body. Expected file with key 'file' and correct filename in request body."
        )
    # An explicit 'filename' header overrides the multipart filename.
    filename = request.headers.get('filename') \
        or request.files['file'].filename
    if not isinstance(filename, str) or len(filename) > 99:
        code = 400
        logFail(query_id, start_time, code)
        abort(code, "Too long or bad filename.")
    # Stage the upload under a unique temporary path for the worker to read.
    filepath = f"/tmp/{uuid.uuid4()}"
    request.files['file'].save(filepath)
    put_params = {
        'filename': filename,
        'filepath': filepath,
        'fileid': fileid,
        'personEmail': request.headers.get('email')
    }
    # The actual DB update happens asynchronously in the worker.
    put_task.apply_async(args=[put_params])
    logSuccess(query_id, start_time)
    return '', 204
def delete_file(fileid, start_time, query_id):
    """Delete a file record and all of its data rows from the database.

    Args:
        fileid: primary key of the Files row to remove.
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        ('', 204) on success; aborts with 404 for an unknown fileid.
    """
    if not Files.query.filter_by(fileid=fileid).all():
        code = 404
        logFail(query_id, start_time, code)
        abort(code, "No file with such fileID in database.")
    # Deletion from the DB
    try:
        # NOTE(review): `transaction()` is a project context manager —
        # presumably it scopes/rolls back the unit of work; confirm whether
        # it also commits, given the explicit commit after the try block.
        with transaction():
            # Child Data rows first, then the Files row itself.
            db.session.query(Data).filter_by(fileid=fileid).delete(
                synchronize_session="fetch")
            db.session.query(Files).filter_by(fileid=fileid).delete(
                synchronize_session="fetch")
    except HTTPException as ex:
        logFail(query_id, start_time, ex.code)
        raise
    db.session.commit()
    logSuccess(query_id, start_time)
    return '', 204
def files_list(start_time, query_id):
    """List every uploaded file together with its data-row count.

    Args:
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        (flask.Response, 200): list of {fileid, filename, count_rows}
        dicts, or "Empty set." when no files exist.
    """
    # Existence check only — first() avoids loading every row like all().
    if Files.query.first() is None:
        logSuccess(query_id, start_time)
        return jsonify("Empty set."), 200
    try:
        # Join each Files row to its Data rows and count rows per file.
        # NOTE(review): this is an inner join, so a file with zero Data
        # rows is omitted from the listing — confirm that is intended.
        fileinf = db.session.query(
            Files, func.count(Data.fileid).label('count_rows')) \
            .join(Files.data) \
            .group_by(Files.fileid) \
            .all()
    except HTTPException as ex:
        logFail(query_id, start_time, ex.code)
        raise
    logSuccess(query_id, start_time)
    # Each row is a (Files, count_rows) tuple; iterate directly instead of
    # indexing with range(len(...)).
    return jsonify([{
        "fileid": row[0].fileid,
        "filename": row[0].filename,
        "count_rows": row.count_rows,
    } for row in fileinf]), 200
def file_info(fileid, start_time, query_id):
    """Return metadata and the data-row count for a single file.

    Args:
        fileid: primary key of the Files row to describe.
        start_time: request start timestamp, forwarded to the access log.
        query_id: correlation id for the access log.

    Returns:
        (flask.Response, 200): file metadata with data_count; aborts with
        404 for an unknown fileid.
    """
    if not Files.query.filter_by(fileid=fileid).all():
        code = 404
        logFail(query_id, start_time, code)
        abort(code, "No file with such fileID in database.")
    try:
        # outerjoin (was inner join): a file that has no Data rows would
        # otherwise be dropped from the result, making first() return None
        # and the access below crash with an unlogged 500 even though the
        # file's existence was just verified. count(Data.fileid) ignores
        # NULLs, so such a file correctly reports data_count 0.
        # filter (was having): the fileid restriction is on a grouping
        # column, not an aggregate, so it belongs in WHERE.
        fileinf = db.session.query(
            Files, func.count(Data.fileid).label('count_rows')) \
            .outerjoin(Files.data) \
            .filter(Files.fileid == fileid) \
            .group_by(Files.fileid) \
            .first()
    except HTTPException as ex:
        logFail(query_id, start_time, ex.code)
        raise
    logSuccess(query_id, start_time)
    return jsonify(fileid=int(fileid),
                   filename=fileinf[0].filename,
                   first_download=fileinf[0].first_download,
                   last_download=fileinf[0].last_download,
                   data_count=fileinf.count_rows), 200