def send_analysis_request(request): """ This endpoint allows to start a Job related to a file or an observable :param is_sample: bool is a sample (file) or an observable (domain, ip, ...) :param md5: string md5 of the item to analyze :param [file]: binary required if is_sample=True, the binary :param [file_mimetype]: string optional, the binary mimetype, calculated by default :param [file_name]: string optional if is_sample=True, the binary name :param [observable_name]: string required if is_sample=False, the observable value :param [observable_classification]: string required if is_sample=False, (domain, ip, ...) :param [analyzers_requested]: list list of requested analyzer to run, before filters :param [tags_id]: list<int> list of id's of tags to apply to job :param [run_all_available_analyzers]: bool default False :param [private]: bool default False, enable it to allow view permissions to only requesting user's groups. :param [force_privacy]: bool default False, enable it if you want to avoid to run analyzers with privacy issues :param [disable_external_analyzers]: bool default False, enable it if you want to exclude external analyzers :param: [runtime_configuration]: dict default {}, contains additional parameters for particular analyzers :param [test]: bool disable analysis for API testing :return 202: if accepted :return 500: if failed """ source = str(request.user) warnings = [] try: data_received = request.data logger.info(f"send_analysis_request received request from {source}." f"Data:{dict(data_received)}.") test = data_received.get("test", False) params = {"source": source} serializer = serializers.JobSerializer(data=data_received, context={"request": request}) if serializer.is_valid(): serialized_data = serializer.validated_data logger.info(f"serialized_data: {serialized_data}") # some values are mandatory only in certain cases if serialized_data["is_sample"]: if "file" not in data_received: return Response({"error": "810"}, status=status.HTTP_400_BAD_REQUEST) if "file_mimetype" not in data_received: serialized_data[ "file_mimetype"] = helpers.calculate_mimetype( data_received["file"], data_received.get("file_name", "")) else: if "observable_name" not in data_received: return Response({"error": "812"}, status=status.HTTP_400_BAD_REQUEST) if "observable_classification" not in data_received: return Response({"error": "813"}, status=status.HTTP_400_BAD_REQUEST) # we need to clean the list of requested analyzers, # ... based on configuration data analyzers_config = helpers.get_analyzer_config() run_all_available_analyzers = serialized_data.get( "run_all_available_analyzers", False) analyzers_requested = serialized_data.get("analyzers_requested", []) if run_all_available_analyzers: if analyzers_requested: logger.info( """either you specify a list of requested analyzers or the 'run_all_available_analyzers' parameter, not both""") return Response({"error": "816"}, status=status.HTTP_400_BAD_REQUEST) # just pick all available analyzers analyzers_requested = [ analyzer_name for analyzer_name in analyzers_config ] cleaned_analyzer_list = helpers.filter_analyzers( serialized_data, analyzers_requested, analyzers_config, warnings, run_all=run_all_available_analyzers, ) params["analyzers_to_execute"] = cleaned_analyzer_list if len(cleaned_analyzer_list) < 1: logger.info("""after the filter, no analyzers can be run. 
Try with other analyzers""") return Response({"error": "814"}, status=status.HTTP_400_BAD_REQUEST) # save the arrived data plus new params into a new job object serializer.save(**params) job_id = serializer.data.get("id", None) md5 = serializer.data.get("md5", "") logger.info(f"New Job added with ID: #{job_id} and md5: {md5}.") if not job_id: return Response({"error": "815"}, status=status.HTTP_400_BAD_REQUEST) else: error_message = f"serializer validation failed: {serializer.errors}" logger.error(error_message) return Response({"error": error_message}, status=status.HTTP_400_BAD_REQUEST) is_sample = serializer.data.get("is_sample", False) if not test: general.start_analyzers( params["analyzers_to_execute"], analyzers_config, serialized_data["runtime_configuration"], job_id, md5, is_sample, ) response_dict = { "status": "accepted", "job_id": job_id, "warnings": warnings, "analyzers_running": cleaned_analyzer_list, } logger.debug(response_dict) return Response(response_dict, status=status.HTTP_200_OK) except Exception as e: logger.exception( f"receive_analysis_request requester:{source} error:{e}.") return Response( {"detail": "error in send_analysis_request. Check logs"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR, )
def send_analysis_request(request): """ This API allows to start a Job related to a file or an observable data_received parameters: :parameter: is_sample: is a sample (file) or an observable (domain, ip, ...) :parameter: md5: string, md5 of the item to analyze :parameter: [file]: binary, required if is_sample=True, the binary :parameter: [file_mimetype]: string, required if is_sample=True, the binary mimetype :parameter: [file_name]: string, optional if is_sample=True, the binary name :parameter: [observable_name]: string, required if is_sample=False, the observable value :parameter: [observable_classification]: string, required if is_sample=False, (domain, ip, ...) :parameter: [analyzers_requested]: list of requested analyzer to run, before filters :parameter: [run_all_available_analyzers]: bool, default False :parameter: [force_privacy]: boolean, default False, enable it if you want to avoid to run analyzers with privacy issues :parameter: [disable_external_analyzers]: boolean, default False, enable it if you want to exclude external analyzers :parameter: [test]: disable analysis for API testing :return: 202 if accepted, 500 if failed """ source = str(request.user) warnings = [] try: data_received = request.data logger.info( "send_analysis_request received request from {}. Data:{}".format( source, dict(data_received))) test = data_received.get('test', False) params = {'source': source} serializer = serializers.JobSerializer(data=data_received) if serializer.is_valid(): serialized_data = serializer.validated_data logger.info("serialized_data: {}".format(serialized_data)) # some values are mandatory only in certain cases if serialized_data['is_sample']: if 'file' not in data_received: return Response({"error": "810"}, status=status.HTTP_400_BAD_REQUEST) if 'file_mimetype' not in data_received: return Response({"error": "811"}, status=status.HTTP_400_BAD_REQUEST) else: if 'observable_name' not in data_received: return Response({"error": "812"}, status=status.HTTP_400_BAD_REQUEST) if 'observable_classification' not in data_received: return Response({"error": "813"}, status=status.HTTP_400_BAD_REQUEST) # we need to clean the list of requested analyzers, based on configuration data analyzers_config = utilities.get_analyzer_config() run_all_available_analyzers = serialized_data[ 'run_all_available_analyzers'] analyzers_requested = serialized_data.get('analyzers_requested', []) if run_all_available_analyzers: if analyzers_requested: logger.info( "either you specify a list of requested analyzers or the" " 'run_all_available_analyzers' parameter, not both") return Response({"error": "816"}, status=status.HTTP_400_BAD_REQUEST) # just pick all available analyzers analyzers_requested = [ analyzer_name for analyzer_name in analyzers_config ] cleaned_analyzer_list = utilities.filter_analyzers( serialized_data, analyzers_requested, analyzers_config, warnings, run_all=run_all_available_analyzers) params['analyzers_to_execute'] = cleaned_analyzer_list if len(cleaned_analyzer_list) < 1: logger.info( "after the filter, no analyzers can be run. 
Try with other analyzers" ) return Response({"error": "814"}, status=status.HTTP_400_BAD_REQUEST) # save the arrived data plus new params into a new job object serializer.save(**params) job_id = serializer.data.get('id', '') md5 = serializer.data.get('md5', '') logger.info("new job_id {} for md5 {}".format(job_id, md5)) if not job_id: return Response({"error": "815"}, status=status.HTTP_400_BAD_REQUEST) else: error_message = "serializer validation failed: {}".format( serializer.errors) logger.info(error_message) return Response(error_message, status=status.HTTP_400_BAD_REQUEST) is_sample = serializer.data.get('is_sample', '') if not test: general.start_analyzers(params['analyzers_to_execute'], analyzers_config, job_id, md5, is_sample) response_dict = { "status": "accepted", "job_id": job_id, "warnings": warnings, "analyzers_running": cleaned_analyzer_list } logger.debug(response_dict) return Response(response_dict, status=status.HTTP_200_OK) except Exception as e: logger.exception( "receive_analysis_request requester:{} error:{}".format(source, e)) return Response( {"error": "error in send_analysis_request. Check logs"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)