def post(self, request, endpoint_name, format=None):
    """Compute a prediction and persist it as an MLRequest.

    Reads "status" (default "production") and "version" query params,
    selects an algorithm, runs it against ``request.data`` and returns the
    prediction payload with the saved request id attached.
    """
    algorithm_status = self.request.query_params.get("status", "production")
    # NOTE(review): version is read but never used to filter — confirm
    # whether version filtering is intended here.
    algorithm_version = self.request.query_params.get("version")

    algs = MLAlgorithm.objects.all()

    # Guard against an empty table: algs[alg_index] below would raise
    # IndexError and surface as a 500.
    if len(algs) == 0:
        return Response(
            {"status": "Error", "message": "ML algorithm is not available"},
            status=400,
        )

    alg_index = 0
    # Key the registry by the selected algorithm's database id (consistent
    # with the other views in this file) instead of grabbing an arbitrary
    # first entry, which could run a different model than the one recorded
    # on the MLRequest below.
    algorithm_object = registry.endpoints[algs[alg_index].id]
    prediction = algorithm_object.compute_prediction(request.data)
    label = prediction["label"] if "label" in prediction else "error"

    ml_request = MLRequest(
        input_data=json.dumps(request.data),
        full_response=prediction,
        response=label,
        feedback="",
        parent_mlalgorithm=algs[alg_index],
    )
    ml_request.save()

    prediction["request_id"] = ml_request.id
    return Response(prediction)
def post(self, request, endpoint_name, format=None):
    """Predict using the active algorithm status rows for *endpoint_name*.

    Query params: "status" (default "production") and optional "version".
    Returns the prediction with the persisted request id; 400 when no
    matching algorithm exists or the prediction carries no label.
    """
    algorithm_status = request.query_params.get("status", "production")
    algorithm_version = request.query_params.get("version")

    algs = MLAlgorithmStatus.objects.filter(
        parent_mlalgorithm__parent_endpoint__name=endpoint_name,
        status=algorithm_status,
        active=True,
    )
    if algorithm_version is not None:
        algs = algs.filter(version=algorithm_version)

    # Client error, not a server fault: return 400 instead of raising a
    # bare Exception (which DRF would surface as a 500).
    if len(algs) == 0:
        return Response(
            {"status": "Error", "message": "No version defined"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    alg_index = 0
    # NOTE(review): deserializing the registry from disk on every request
    # is expensive — consider loading it once at module import. TODO confirm.
    registry = joblib.load('registry.joblib')
    algorithm_object = registry[algs[alg_index].id]
    prediction = algorithm_object.compute_prediction(request.data)

    # .get avoids a KeyError when "label" is missing; an absent or falsy
    # label is a bad request, not an unhandled exception.
    label = prediction.get("label")
    if not label:
        return Response(
            {"status": "Error", "message": "Bad Request"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    ml_req = MLRequest(
        input_data=json.dumps(request.data),
        full_response=prediction,
        response=label,
        feedback="",
        parent_mlalgorithm=algs[alg_index].parent_mlalgorithm,
    )
    ml_req.save()

    prediction['request_id'] = ml_req.id
    return Response(prediction, status=status.HTTP_200_OK)
def post(self, request, endpoint_name, format=None):
    """Select an active algorithm for *endpoint_name* and return its prediction.

    Query params:
        status:  algorithm status to match (defaults to "production").
        version: optional exact version filter.
    Under "ab_testing" one of the first two candidates is picked at random.
    """
    params = self.request.query_params
    algorithm_status = params.get("status", "production")
    algorithm_version = params.get("version")

    candidates = MLAlgorithm.objects.filter(
        parent_endpoint__name=endpoint_name,
        mlalgorithmstatus__status=algorithm_status,
        mlalgorithmstatus__active=True,
    )
    if algorithm_version is not None:
        candidates = candidates.filter(version=algorithm_version)

    if len(candidates) == 0:
        return Response(
            {"status": "Error", "message": "ML algorithm is not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    if algorithm_status != "ab_testing" and len(candidates) != 1:
        return Response(
            {
                "status": "Error",
                "message": "ML algorithm selection is ambiguous. Please specify algorithm version."
            },
            status=status.HTTP_400_BAD_REQUEST,
        )

    # A/B testing: flip a coin between the first two candidates.
    chosen = 0
    if algorithm_status == "ab_testing" and not (rand() < 0.5):
        chosen = 1

    algorithm_object = registry.endpoints[candidates[chosen].id]
    prediction = algorithm_object.compute_prediction(request.data)
    label = prediction["label"] if "label" in prediction else "error"

    ml_request = MLRequest(
        input_data=json.dumps(request.data),
        full_response=prediction,
        response=label,
        feedback="",
        parent_algorithm=candidates[chosen],
    )
    ml_request.save()

    prediction["request_id"] = ml_request.id
    return Response(prediction)
def post(self, request, endpoint_name, format=None):
    """Return a prediction from an active algorithm bound to *endpoint_name*.

    "status" (default "production") and optional "version" query params
    narrow the selection; with status "ab_testing" one of the first two
    matches is chosen at random, otherwise exactly one match is required.
    """
    query = self.request.query_params
    requested_status = query.get("status", "production")
    requested_version = query.get("version")

    matches = MLAlgorithm.objects.filter(
        parent_endpoint__name=endpoint_name,
        status__status=requested_status,
        status__active=True,
    )
    if requested_version is not None:
        matches = matches.filter(version=requested_version)

    n_matches = len(matches)
    if n_matches == 0:
        error = {"status": "Error", "message": "ML algorithm is not available"}
        return Response(error, status=status.HTTP_400_BAD_REQUEST)
    if n_matches != 1 and requested_status != "ab_testing":
        error = {
            "status": "Error",
            "message": "ML algorithm selection is ambiguous. Please specify algorithm version."
        }
        return Response(error, status=status.HTTP_400_BAD_REQUEST)

    index = 0
    if requested_status == "ab_testing":
        index = 0 if rand() < 0.5 else 1

    # Look up the live model object in the ML registry by database id.
    model = registry.endpoints[matches[index].id]
    prediction = model.compute_prediction(request.data)
    label = prediction["label"] if "label" in prediction else "error"

    record = MLRequest(
        input_data=json.dumps(request.data),
        full_response=prediction,
        response=label,
        feedback="",
        parent_mlalgorithm=matches[index],
    )
    record.save()

    prediction["request_id"] = record.id
    return Response(prediction)
def post(self, request, endpoint_name, format=None):
    """Predict likes/dislikes recommendations for *endpoint_name*.

    Validates the payload with ItemSerializer, builds the likes/dislikes
    dict the model expects, runs the prediction and records it as an
    MLRequest. Returns the raw prediction payload.
    """
    algorithm_status = self.request.query_params.get("status", "production")
    algorithm_version = self.request.query_params.get("version")

    algs = MLAlgorithm.objects.filter(
        parent_endpoint__name=endpoint_name,
        status__status=algorithm_status,
        status__active=True,
    )
    if algorithm_version is not None:
        algs = algs.filter(version=algorithm_version)

    # Empty check must come before any algs[...] access — the old debug
    # print indexed algs[0] first and crashed when nothing matched.
    if len(algs) == 0:
        return Response(
            {"status": "Error", "message": "ML algorithm is not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    alg_index = 0
    if algorithm_status == "ab_testing":
        alg_index = 0 if rand() < 0.5 else 1

    # Use the selected algorithm's id: the previous hard-coded algs[5]
    # raised IndexError with fewer than six matches and ignored the
    # A/B-testing selection above.
    algorithm_object = registry.endpoints[algs[alg_index].id]

    # Validate the incoming payload and build the dict the model expects.
    item_serializer = ItemSerializer(data=request.data)
    item_serializer.is_valid(raise_exception=True)
    request_dict = {
        "likes": item_serializer.data['likes_array'],
        "dislikes": item_serializer.data['dislikes_array'],
    }

    prediction = algorithm_object.compute_prediction(request_dict)

    ml_request = MLRequest(
        input_data=json.dumps(request.data),
        full_response=prediction,
        # NOTE(review): stores the whole prediction (not a label string)
        # as the response — confirm MLRequest.response accepts this.
        response=prediction,
        feedback="",
        parent_mlalgorithm=algs[alg_index],
    )
    ml_request.save()

    return Response(prediction)
def post(self, request, algorithm_name):
    """Compute and persist a prediction with the single matching algorithm.

    Filters MachineLearningAlgorithm rows by *algorithm_name*, the
    "status" query param (default "production") and the active flag,
    optionally narrowed by "version"; exactly one match is required.
    """
    wanted_status = self.request.query_params.get("status", "production")
    wanted_version = self.request.query_params.get("version")

    candidates = MachineLearningAlgorithm.objects.filter(
        name=algorithm_name,
        status__status=wanted_status,
        status__active=True,
    )
    if wanted_version is not None:
        candidates = candidates.filter(version=wanted_version)

    if len(candidates) == 0:
        return Response(
            {"status": "Error", "message": "ML algorithm is not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    if len(candidates) != 1:
        return Response(
            {
                "status": "Error",
                "message": "ML algorithm selection is ambiguous. Please specify algorithm version.",
            },
            status=status.HTTP_400_BAD_REQUEST,
        )

    selected = candidates[0]
    model = registry.endpoints[selected.id]
    prediction = model.compute_prediction(request.data)
    label = prediction["label"] if "label" in prediction else "error"

    ml_request = MLRequest(
        input_data=json.dumps(request.data),
        full_response=prediction,
        response=label,
        feedback="",
        parent_algorithm=selected,
    )
    ml_request.save()

    prediction["request_id"] = ml_request.id
    return Response(prediction)
def post(self, request, endpoint_name, format=None):
    """Serve a prediction for *endpoint_name*.

    Note: unlike sibling views, "version" defaults to '0.0.1', so the
    version filter is effectively always applied. Status "ab_testing"
    randomly picks between the first two matching algorithms.
    """
    algorithm_status = self.request.query_params.get('status', 'production')
    algorithm_version = self.request.query_params.get('version', '0.0.1')

    matching = MLAlgorithm.objects.filter(
        parent_endpoint__name=endpoint_name,
        status__status=algorithm_status,
        status__active=True,
    )
    if algorithm_version is not None:
        matching = matching.filter(version=algorithm_version)

    if len(matching) == 0:
        return Response(
            {'status': 'Error', 'message': 'ML algorithm is not available'},
            status=status.HTTP_400_BAD_REQUEST,
        )
    if algorithm_status != 'ab_testing' and len(matching) != 1:
        return Response(
            {'status': 'Error',
             'message': 'ML algorithm selection is ambiguous. Please specify algorithm version'},
            status=status.HTTP_400_BAD_REQUEST,
        )

    pick = 0
    if algorithm_status == 'ab_testing':
        pick = 0 if rand() < 0.5 else 1

    model = registry.endpoints[matching[pick].id]
    prediction = model.compute_prediction(request.data)
    label = prediction['label'] if 'label' in prediction else 'error'

    ml_request = MLRequest(
        input_data=json.dumps(request.data),
        full_response=prediction,
        response=label,
        feedback='',
        parent_mlalgorithm=matching[pick],
    )
    ml_request.save()

    prediction['request_id'] = ml_request.id
    return Response(prediction)
def post(self, request, endpoint_name, format=None):
    """Predict for *endpoint_name* and annotate the result with an
    advertise / don't-advertise action.

    Selects the active algorithm whose status matches the "status" query
    param (default "production"); 400 when none match or the selection is
    ambiguous outside of A/B testing.
    """
    algorithm_status = self.request.query_params.get("status", "production")

    algs = MLAlgorithm.objects.filter(
        parent_endpoint__name=endpoint_name,
        mlalgo_status__status=algorithm_status,
        mlalgo_status__active=True,
    )

    if len(algs) == 0:
        return Response(
            {"status": "Error", "message": "ML algorithm is not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    if len(algs) != 1 and algorithm_status != "ab_testing":
        return Response(
            {
                "status": "Error",
                "message": "ML algorithm selection is ambiguous. Please specify algorithm version."
            },
            status=status.HTTP_400_BAD_REQUEST,
        )

    algorithm_object = registry.endpoints[algs[0].id]
    info = algorithm_object.Predict(request.data)
    # Map the numeric label to a human-readable action (0 -> don't advertise).
    # NOTE(review): raises KeyError if Predict() omits 'label' — confirm
    # the model contract guarantees a label.
    info["action req"] = "Dont Advertise" if info[
        'label'] == 0 else "Advertise"

    ml_request = MLRequest(
        input_data=json.dumps(request.data),
        full_response=info,
        response=info['label'],
        feedback="",
        parent_mlalgorithm=algs[0],
    )
    ml_request.save()

    return Response(info)
def post(self, request, endpoint_name, format=None):
    """Compute a prediction for *endpoint_name* and persist an MLRequest.

    "status" (default "production") and optional "version" query params
    select the algorithm; with status "ab_testing" one of the first two
    matches is picked at random.
    """
    algorithm_status = self.request.query_params.get("status", "production")
    algorithm_version = self.request.query_params.get("version")

    algs = MLAlgorithm.objects.filter(
        parent_endpoint__name=endpoint_name,
        mlalgorithmstatus__status=algorithm_status,
        mlalgorithmstatus__active=True,
    )
    if algorithm_version is not None:
        algs = algs.filter(version=algorithm_version)

    if len(algs) == 0:
        # Typo fix: message previously read "availiable".
        return Response(
            {"status": "Error", "message": "ML algorithm is not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    # Guard added: without it, multiple matches outside A/B testing silently
    # used an arbitrary algorithm, and A/B testing with a single match could
    # raise IndexError on algs[1] below.
    if len(algs) != 1 and algorithm_status != "ab_testing":
        return Response(
            {
                "status": "Error",
                "message": "ML algorithm selection is ambiguous. Please specify algorithm version."
            },
            status=status.HTTP_400_BAD_REQUEST,
        )

    alg_index = 0
    if algorithm_status == "ab_testing":
        alg_index = 0 if np.random.rand() < 0.5 else 1

    algorithm_object = registry.endpoints[algs[alg_index].id]
    prediction = algorithm_object.compute_prediction(request.data)
    label = prediction['label'] if 'label' in prediction else 'error'

    ml_request = MLRequest(
        input_data=json.dumps(request.data),
        full_response=prediction,
        response=label,
        feedback="",
        parent_mlalgorithm=algs[alg_index],
    )
    ml_request.save()

    prediction['request_id'] = ml_request.id
    return Response(prediction)