def login():
    """Authenticate a user from posted credentials and issue an auth token.

    The token is returned both in the response context and as a cookie;
    the client is pointed at the customer dashboard on success.
    """
    email = g.json.get('email')
    password = g.json.get('password')

    user = services.user.get_by_email(email)
    if not user:
        logging.debug(f"No user found for {email}")
        return handle_error(401, 'Incorrect user or password')
    if not services.user.verify_password(user, password):
        logging.warning(f"Incorrect password for {email}")
        return handle_error(401, 'Incorrect user or password')
    if not user.confirmed:
        logging.warning(f"User {user.email} hasn't been confirmed!")
        return handle_error(401, f'Please confirm user {user.email} first')

    raw_token = services.user.generate_auth_token(
        user, expiration=TOKEN_EXPIRATION)
    ascii_token = raw_token.decode('ascii')

    api_response = ApiResponse(
        content_type=request.accept_mimetypes.best,
        next=url_for('customer.dashboard'),
        context={'token': ascii_token},
    )
    cookie_expiry = (datetime.datetime.now()
                     + datetime.timedelta(minutes=TOKEN_EXPIRATION))
    api_response.set_cookie('token', ascii_token, expires=cookie_expiry)
    return api_response()
def logout():
    """Clear the auth cookie and send the client back to the login page."""
    api_response = ApiResponse(
        content_type=request.accept_mimetypes.best,
        next=url_for('main.login'),
    )
    # An expires of 0 (the epoch) makes the browser drop the cookie.
    api_response.set_cookie('token', '', expires=0)
    return api_response()
def current():
    """Return the logged-in user's company's current datasource."""
    return ApiResponse(
        content_type=request.accept_mimetypes.best,
        context=g.user.company.current_datasource,
    )()
def submit(upload_code):
    """Trigger a detection run against the datasource for *upload_code*.

    Enqueues the celery task and responds with links to the task's detail
    and result endpoints.
    """
    requested_name = g.json.get('name')
    training_code = g.json.get('training_task_code')

    training_task = app.services.training.get_training_for_task_code(
        training_code)
    datasource = services.datasource.get_by_upload_code(upload_code)
    detection_task = services.detection.trigger_detection(
        requested_name, datasource, training_task, g.user.id)

    # TODO: enqueueing really belongs inside trigger_detection itself.
    detect_celery_task.apply_async((detection_task.task_code, ))

    code = detection_task.task_code
    return ApiResponse(
        content_type=request.accept_mimetypes.best,
        next=url_for('detection.detail', task_code=code),
        context={
            'task_code': code,
            'task_status': url_for('detection.detail',
                                   task_code=code,
                                   _external=True),
            'result': url_for('detection.result',
                              task_code=code,
                              _external=True)
        })()
def configuration_detail(company_id):
    """Return the stored configuration for a company; 404 when absent."""
    configuration = services.company.get_configurations_by_company_id(
        company_id)
    if configuration:
        return ApiResponse(content_type=request.accept_mimetypes.best,
                           context=configuration)()
    logging.debug(f"No configuration found for company id {company_id}")
    abort(404, 'No such configuration found')
def delete(task_code):
    """Delete the detection task identified by *task_code*.

    Flashes a confirmation message and points the client back at the
    detection list.
    """
    detection_task = services.detection.get_task_by_code(task_code)
    if not detection_task:
        # Guard the delete: without this, a missing task was passed straight
        # to services.detection.delete. 404 matches the handling used by the
        # other task endpoints in this file.
        logging.debug(f"No task found for code {task_code}")
        abort(404, 'No task found!')
    services.detection.delete(detection_task)
    response = ApiResponse(content_type=request.accept_mimetypes.best,
                           next=url_for('detection.list'),
                           status_code=200)
    flash(f"Task version {task_code} has been deleted", category='success')
    return response()
def result(task_code):
    """Return the detection result for *task_code*, or 404 when none exists."""
    outcome = services.detection.get_result_by_code(task_code)
    if not outcome:
        logging.debug(f"No result was found for task code {task_code}")
        abort(404, 'No result found!')
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context=outcome)()
def get(datasource_id):
    """Return the datasource for *datasource_id*, or 404 when none exists."""
    found = services.datasource.get_by_upload_code(datasource_id)
    if not found:
        logging.debug(f"No datasource was found for id {datasource_id}")
        abort(404, 'No data source found!')
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context=found)()
def result(task_code):
    """Return the prediction result for an individual task, or 404."""
    outcome = services.prediction.get_result_by_code(task_code)
    if not outcome:
        logging.debug(f"No result was found for task code {task_code}")
        abort(404, 'No result found!')
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context=outcome)()
def get_configuration(datasource_configuration_id):
    """Return a datasource configuration, enforcing company ownership."""
    configuration = DataSourceConfigurationEntity.get_for_id(
        datasource_configuration_id)
    if not configuration:
        return handle_error(404, 'No datasource configuration found')
    # Only expose configurations owned by the caller's company.
    if configuration.company_id != g.user.company_id:
        return handle_error(403, 'Unauthorised')
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context=configuration)()
def get_single_task(task_code):
    """Look up one prediction task by code, enforcing company ownership."""
    task = services.prediction.get_task_by_code(task_code)
    if not task:
        logging.debug(f"No task found for code {task_code}")
        abort(404, 'No task found!')
    if task.company_id != g.user.company_id:
        abort(403)
    return ApiResponse(
        content_type=request.accept_mimetypes.best,
        context=task,
    )()
def delete(datasource_id):
    """Delete a historical datasource version.

    Refuses to delete the original ingestion datasource. Flashes a
    confirmation and points the client at the customer datasource list.
    """
    datasource = services.datasource.get_by_upload_code(datasource_id)
    if not datasource:
        # Previously a missing datasource crashed on the attribute access
        # below; return a 404 instead, matching the other endpoints here.
        logging.debug(f"No datasource was found for id {datasource_id}")
        abort(404, 'No data source found!')
    if datasource.is_original:
        message = f"Tried to delete original ingestion datasource: {datasource_id}"
        logging.debug(message)
        return handle_error(400, message)
    services.datasource.delete(datasource)
    response = ApiResponse(content_type=request.accept_mimetypes.best,
                           next=url_for('customer.list_datasources'),
                           status_code=200)
    flash(f"Historical data version {datasource_id} has been deleted",
          category='success')
    return response()
def confirm(token):
    """Confirm a user account from an emailed confirmation token.

    Renders the confirmed template and points the client at login.
    """
    email = confirm_token(token)
    if not email:
        abort(401, 'Unauthorised')
    user = services.user.get_by_email(email)
    if not user:
        # A valid token for a user that no longer exists would otherwise
        # crash on the attribute access below.
        abort(404, 'No user found for this token')
    if user.confirmed:
        abort(400, 'User was already confirmed')
    user = services.user.confirm(user)
    logging.info("User %s successfully confirmed!", user.email)
    response = ApiResponse(content_type=request.accept_mimetypes.best,
                           template='user/confirmed.html',
                           next=url_for('main.login'))
    return response()
def list():
    """Paginated listing of the company's detection tasks."""
    PER_PAGE = 10
    # type=int makes a malformed ?page= value fall back to 1 instead of
    # raising ValueError the way a bare int(...) on user input did.
    current_page = request.args.get('page', default=1, type=int)
    query = services.detection.filter_by_company_id(DetectionTaskEntity.query,
                                                    g.user.company.id)
    pagination = paginate(query, current_page, PER_PAGE)
    response = ApiResponse(content_type=request.accept_mimetypes.best,
                           template='detection/list.html',
                           context={
                               'detection_task_list': pagination.items,
                               'pagination': pagination,
                               'current_page': current_page
                           })
    return response()
def register():
    """Register a new (unconfirmed) user under an existing company.

    Validates the email against the company's domain, creates the user and
    logs a confirmation token. Responds 201 with the token in the context.
    """
    email = g.json.get('email')
    password = g.json.get('password')
    # Explicit check instead of `assert`: asserts are stripped under
    # `python -O`, which would silently skip this validation.
    if not (email and password):
        abort(400, 'Please specify a user email and password')
    company = services.company.get_for_email(email)
    if not company:
        logging.warning("No company could be found for %s", email)
        abort(400, f"No company could be found for {email}")
    if not is_valid_email_for_company(email, company):
        logging.warning("Invalid email %s for company: %s", email,
                        company.domain)
        abort(401, f"Invalid email {email} for company: {company.domain}")
    user = services.user.get_by_email(email)
    if user is not None:
        abort(400, 'Cannot register an existing user!')
    user = User(email=email,
                confirmed=False,
                company_id=company.id,
                password=password)
    user = services.user.insert(user)
    confirmation_token = generate_confirmation_token(user.email)
    logging.info("Confirmation token for %s: %s", user.email,
                 confirmation_token)
    # Only admins can create users for now
    # services.email.send_confirmation_email(user.email, confirmation_token)
    response = ApiResponse(content_type=request.accept_mimetypes.best,
                           next=url_for('main.login'),
                           status_code=201,
                           context={
                               'email': user.email,
                               'id': user.id,
                               'confirmation_token': confirmation_token
                           })
    return response()
def submit():
    """Validate a prediction request and queue the training+prediction job."""
    company_id = g.user.company.id
    # Predictions always run against the company's *latest* datasource.
    latest_datasource = g.user.company.current_datasource
    prediction_request, errors = PredictionRequestSchema().load(g.json)
    if errors:
        return jsonify(errors=errors), 400

    task_code = services.prediction.generate_task_code()
    logging.warning("Generated task code was %s", task_code)
    task = services.prediction.create_prediction_task(
        task_name=prediction_request['name'],
        task_code=task_code,
        company_id=company_id,
        user_id=g.user.id,
        datasource_id=latest_datasource.id,
    )
    services.prediction.set_task_status(task, TaskStatusTypes.queued)

    training_and_prediction_task.apply_async(
        (task_code, company_id, latest_datasource.upload_code,
         prediction_request))

    return ApiResponse(
        content_type=request.accept_mimetypes.best,
        next=url_for('customer.dashboard'),
        context={
            'task_code': task_code,
            'task_status': url_for('prediction.get_single_task',
                                   task_code=task_code,
                                   _external=True),
            'result': url_for('prediction.result',
                              task_code=task_code,
                              _external=True)
        })()
def delete(datasource_id):
    """Delete an uploaded flight datasource and clean up its stored files.

    Refuses when the flight is part of a training set. Flashes a
    confirmation and points the client back at the datasource list.
    """
    upload_manager = services.company.get_upload_manager(
        g.user.company.current_configuration)
    datasource = services.datasource.get_by_upload_code(datasource_id)
    if not datasource:
        # Without this guard a missing datasource crashed on the attribute
        # access below; 404 matches the other datasource endpoints.
        logging.debug(f"No datasource was found for id {datasource_id}")
        abort(404, 'No data source found!')
    if datasource.is_part_of_training_set:
        return handle_error(
            400,
            f"Flight {datasource.name} is part of training set, cannot be deleted"
        )
    services.datasource.delete(datasource)
    upload_manager.cleanup(datasource.location)
    response = ApiResponse(content_type=request.accept_mimetypes.best,
                           next=url_for('datasource.list'),
                           status_code=200)
    flash(f"Flight {datasource.name} version {datasource_id} has been deleted",
          category='success')
    return response()
def detail(task_code):
    """Detail page for a detection task, with its diagnostic task and the
    training task it ran against. Enforces company ownership.
    """
    detection = services.detection.get_task_by_code(task_code)
    # Bug fix: the original dereferenced detection.training_task_id *before*
    # the existence check, so a missing task raised AttributeError instead
    # of returning this 404.
    if not detection:
        logging.debug(f"No task found for code {task_code}")
        return handle_error(404, 'No task found!')
    if not detection.company_id == g.user.company_id:
        return handle_error(403, "Unauthorised")
    diagnostic_task = services.diagnostic.get_task_by_code(task_code)
    training_task = app.services.training.get_training_for_id(
        detection.training_task_id)
    response = ApiResponse(content_type=request.accept_mimetypes.best,
                           context={
                               'detection': detection,
                               'diagnostic': diagnostic_task,
                               'training_task': training_task
                           },
                           template='detection/detail.html')
    return response()
def configuration_update(company_id):
    """Validate and store a new model configuration for a company."""
    company = services.company.get_by_id(company_id)
    if not company:
        return handle_error(404, "No company could be found!")

    data, errors = ModelConfigurationSchema().load(g.json)
    if errors or not data:
        logging.debug(f"Invalid configuration supplied: {str(errors)}")
        return abort(400, f"Invalid configuration: {str(errors)}")

    stored = services.company.insert_configuration(
        CompanyConfiguration(company_id=company_id,
                             user_id=g.user.id,
                             configuration=json_reload(data)))
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context=stored.configuration,
                       status_code=201)()
def register():
    """Create a new company from a posted name and email domain."""
    company_name = g.json.get('name')
    domain = g.json.get('domain')
    if not (company_name and domain):
        logging.debug("Company name and/or domain weren't supplied")
        abort(
            400,
            'Request error: please specify company name and company domain.')
    if services.company.get_for_domain(domain):
        logging.debug(f"Cannot recreate an existing company: {domain}")
        abort(400, 'Unable to create existing company')
    new_company = services.company.insert(
        Company(name=company_name, domain=domain))
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context=new_company,
                       status_code=201)()
def submit_configuration():
    """Create a datasource configuration from the posted name/meta pair."""
    name = g.json.get('name')
    # TODO: use a schema for this
    meta = g.json.get('meta')
    if not (name and meta):
        return handle_error(400, 'Error: You must specify name and meta values')

    entity = DataSourceConfigurationEntity(company_id=g.user.company_id,
                                           name=name,
                                           meta=meta)
    entity.save()
    return ApiResponse(
        content_type=request.accept_mimetypes.best,
        next=url_for('datasource.list'),
        context={'datasource_config': entity},
        status_code=201)()
def list():
    """Paginated datasource listing for the upload page, including the
    upload widget's allowed extensions and the company's datasource types.
    """
    PER_PAGE = 10
    company_id = g.user.company_id
    if not g.user.company.current_configuration:
        return handle_error(
            404, 'No company configuration found, please provide one.')
    upload_manager = services.company.get_upload_manager(
        g.user.company.current_configuration)
    # type=int makes a malformed ?page= value fall back to 1 instead of
    # raising ValueError the way a bare int(...) on user input did.
    current_page = request.args.get('page', default=1, type=int)
    query = services.datasource.filter_by_company_id(DataSourceEntity.query,
                                                     company_id)
    pagination = paginate(query, current_page, PER_PAGE)
    response = ApiResponse(
        content_type=request.accept_mimetypes.best,
        context={
            'datasources': pagination.items,
            'pagination': pagination,
            'current_page': current_page,
            'allowed_extensions': ','.join([
                ".{}".format(ext) for ext in upload_manager.allowed_extensions
            ]),
            'max_file_size': 2147483648,  # 2 GiB upload cap
            'datasource_types':
            DataSourceConfigurationEntity.get_for_company_id(company_id).all()
        },
        template='datasource/list.html')
    return response()
def detail(upload_code):
    """Datasource detail page: the flight plus its detection tasks and the
    successful training tasks for its configuration. Enforces ownership.
    """
    datasource = services.datasource.get_by_upload_code(upload_code)
    if not datasource:
        logging.debug(f"No flight was found for code {upload_code}")
        return handle_error(404, 'No flight found!')
    if not datasource.company_id == g.user.company_id:
        return handle_error(403, "Unauthorised")
    query = TrainingTaskEntity.query
    query = services.training.filter_by_datasource_configuration_id(
        query, datasource.datasource_configuration_id)
    query = services.training.filter_by_company_id(query,
                                                   datasource.company_id)
    query = services.training.filter_by_status(query,
                                               TaskStatusTypes.successful)
    # Bug fix: order_by returns a *new* query; the original discarded the
    # return value, so the training task list came back unordered.
    query = query.order_by(TrainingTaskEntity.id.desc())
    training_task_list = query.all()
    response = ApiResponse(
        content_type=request.accept_mimetypes.best,
        context={
            'datasource': datasource,
            'label_types': {member.name: member.value
                            for member in LabelTypes},
            'detection_task_list':
            services.detection.get_task_for_datasource_id(datasource.id),
            'training_task_list': training_task_list
        },
        template='datasource/detail.html')
    return response()
def save_label():
    """Set (or clear) the label on the datasource for the posted upload code."""
    upload_code = g.json.get('upload_code')
    datasource_label = g.json.get('datasource_label')

    entity = DataSourceEntity.query.filter(
        DataSourceEntity.upload_code == upload_code).one_or_none()
    if not entity:
        logging.debug(f"No datasource was found for code {upload_code}")
        return handle_error(404, 'No data source found!')
    if entity.company_id != g.user.company_id:
        return handle_error(403, "Unauthorised")

    if not datasource_label:
        # An empty/absent label clears any existing one.
        entity.label = None
    else:
        try:
            entity.label = LabelTypes[datasource_label]
        except KeyError:
            # NOTE(review): 404 for an invalid label value is odd — 400 may
            # fit better; kept as-is to preserve behaviour.
            return handle_error(
                404, f'Invalid label {datasource_label} for datasource')
    entity.update()

    datasource = services.datasource.get_by_upload_code(upload_code)
    flash(f"Flight label has been set to '{datasource_label}'",
          category='success')
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       next=url_for('datasource.detail',
                                    upload_code=upload_code),
                       context={'datasource': datasource},
                       status_code=201)()
def upload():
    """Ingest a historical-data CSV upload for the current user's company.

    Validates the file, merges it with the company's current datasource
    (when one exists), deduplicates and re-sorts the combined frame, persists
    it as HDF5, records a DataSource row and runs the company's configured
    upload strategy. Responds 201 with the new datasource.

    Left byte-identical apart from comments: the concat -> reset_index ->
    drop_duplicates -> set_index -> sort_index order must be preserved.
    """
    user = g.user
    company = user.company
    company_configuration = company.current_configuration
    if not company_configuration:
        return handle_error(
            400,
            f"{company.name} cannot upload historical data yet, please contact support."
        )
    if not len(request.files):
        logging.debug("No file was uploaded")
        return handle_error(400, "No file Provided!")
    uploaded_file = request.files['upload']
    if not allowed_extension(uploaded_file.filename):
        logging.debug(f"Invalid extension for upload {uploaded_file.filename}")
        return handle_error(
            400, f'File extension for {uploaded_file.filename} not allowed!')
    upload_code = generate_upload_code()
    filename = services.datasource.generate_filename(
        upload_code, secure_filename(uploaded_file.filename))
    # The interpreter knows how to parse this company's CSV layout.
    interpreter = services.company.get_datasource_interpreter(
        company_configuration)
    target_feature = company_configuration.configuration.target_feature
    data_frame, errors = interpreter.from_csv_to_dataframe(uploaded_file)
    if errors:
        logging.debug(f"Invalid file uploaded: {', '.join(errors)}")
        return handle_error(400, ', '.join(errors))
    # The configured target feature must be present as a column.
    if not target_feature in list(data_frame.columns):
        return handle_error(
            400,
            f"Required feature {target_feature} not in {uploaded_file.filename}"
        )
    features = list(data_frame.columns)
    if user.company.current_datasource:
        # Append the new upload to the existing data rather than replacing it.
        data_source = services.datasource.get_by_upload_code(
            user.company.current_datasource.upload_code)
        existing_data_frame = data_source._model.get_file()
        data_frame = pd.concat([existing_data_frame, data_frame])
    saved_path = os.path.join(current_app.config['UPLOAD_FOLDER'],
                              filename + '.hdf5')
    # Drop duplicate rows introduced by overlapping uploads, then restore
    # the interpreter's index and chronological order before persisting.
    data_frame = data_frame.reset_index()
    data_frame = data_frame.drop_duplicates()
    data_frame = data_frame.set_index(interpreter.INDEX_COLUMN)
    data_frame = data_frame.sort_index(ascending=True)
    data_frame.to_hdf(saved_path, key=current_app.config['HDF5_STORE_INDEX'])
    # The very first upload for a company is flagged as the original.
    original = True if len(company.data_sources) == 0 else False
    upload = DataSource(
        user_id=user.id,
        company_id=company.id,
        upload_code=upload_code,
        type=UploadTypes.FILESYSTEM,
        location=saved_path,
        filename=filename,
        start_date=data_frame.index[0].to_pydatetime(),
        end_date=data_frame.index[-1].to_pydatetime(),
        is_original=original,
        features=', '.join(features),
        target_feature=target_feature,
    )
    datasource = services.datasource.insert(upload)
    upload_strategy_class = company_configuration.configuration.upload_strategy
    upload_strategy = services.strategies.get_upload_strategy(
        upload_strategy_class)
    upload_strategy.run(datasource=datasource,
                        company_configuration=company_configuration)
    response = ApiResponse(content_type=request.accept_mimetypes.best,
                           context=datasource,
                           next=url_for('customer.list_datasources'),
                           status_code=201)
    return response()
def current_configuration():
    """Return the current configuration of the logged-in user's company."""
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context=g.user.company.current_configuration)()
def list_customer_actions():
    """Return the actions recorded for the logged-in user's company."""
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context=g.user.company.actions)()
def show_current_company_info():
    """Return the logged-in user's company record."""
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context=g.user.company)()
def render_400(e):
    """Error handler: render a 400 response carrying the abort description."""
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       context={'message': e.description},
                       template='400.html',
                       status_code=400)()
def upload():
    """Validate and ingest a flight data upload for the current company.

    Checks name uniqueness, datasource type, file presence and company
    configuration before validating and saving the data. Responds 201 and
    points the client at the new datasource's detail page.
    """
    user = g.user
    company = user.company

    datasource_name = g.json.get('name')
    if not datasource_name:
        return handle_error(400, "Please provide a name for the data upload")
    if services.datasource.datasource_name_exists(datasource_name):
        return handle_error(
            400, f"A data source for {datasource_name} already exists")

    datasource_type_id = g.json.get('datasource_type_id')
    if not datasource_type_id:
        return handle_error(400, "Please provide a datasource type id!")
    datasource_type = DataSourceConfigurationEntity.get_for_id(
        datasource_type_id)
    if not datasource_type:
        return handle_error(
            404, f"No datasource type found for {datasource_type_id}")

    if not len(request.files):
        logging.debug("No file was uploaded")
        return handle_error(400, "No file Provided!")

    company_configuration = company.current_configuration
    uploaded_file = request.files['upload']
    if not company_configuration:
        uploaded_file.close()
        return handle_error(
            400,
            f"{company.name} cannot upload data yet, please contact support.")

    upload_manager = services.company.get_upload_manager(company_configuration)
    try:
        validated_frame = upload_manager.validate(uploaded_file,
                                                  datasource_type_id)
    except Exception as e:
        return handle_error(400, f"Data validation has failed: {str(e)}")

    upload_code = generate_upload_code()
    try:
        datasource = services.datasource.save_datasource(
            datasource_name, company.id, datasource_type_id, upload_code,
            upload_manager, validated_frame, user)
    except Exception as e:
        # Remove any partially-written upload artefacts before failing.
        upload_manager.cleanup(upload_code)
        return handle_error(400, str(e))

    flash("Flight uploaded successfully.", category='success')
    return ApiResponse(content_type=request.accept_mimetypes.best,
                       next=url_for('datasource.detail',
                                    upload_code=upload_code),
                       context={'datasource': datasource},
                       status_code=201)()