Example #1
0
def yara_update_repos():
    """Pull the latest rules for every git-backed Yara analyzer.

    Scans the analyzer configuration for analyzers whose name starts with
    'Yara_Scan', collects their configured rule directories and runs a
    ``git pull`` on each directory that exists on disk.

    :return: list of every yara rule directory found in the configuration
    """
    logger.info("started pulling images from yara public repos")
    analyzer_config = utilities.get_analyzer_config()
    found_yara_dirs = []
    # use a distinct loop variable: the original shadowed `analyzer_config`
    # itself, making the code fragile and confusing
    for analyzer_name, single_config in analyzer_config.items():
        if not analyzer_name.startswith('Yara_Scan'):
            continue
        additional_params = single_config.get('additional_config_params', {})
        yara_dirs = additional_params.get('git_repo_main_dir', [])
        if not yara_dirs:
            # fall back to required key
            yara_dirs = additional_params.get('directories_with_rules', [])
        # BUGFIX: collect and pull for BOTH config keys. Previously this
        # whole section lived inside the `if not yara_dirs:` fallback, so
        # analyzers configured via 'git_repo_main_dir' were silently skipped.
        found_yara_dirs.extend(yara_dirs)
        # customize it as you wish
        for yara_dir in yara_dirs:
            if os.path.isdir(yara_dir):
                repo = Repo(yara_dir)
                o = repo.remotes.origin
                o.pull()
                logger.info("pull repo on {} dir".format(yara_dir))
            else:
                logger.warning(
                    "yara dir {} does not exist".format(yara_dir))

    return found_yara_dirs
Example #2
0
def get_analyzer_configs(request):
    """
    Return the uploaded analyzer configuration.

    Useful if you want to choose the analyzers programmatically.

    :return 200:
        if ok
    :return 500:
        if failed
    """
    requester = str(request.user)
    try:
        logger.info(f"get_analyzer_configs received request from {requester}.")
        return Response(utilities.get_analyzer_config())
    except Exception as e:
        # top-level boundary: log the full traceback and hide details from the client
        logger.exception(f"get_analyzer_configs requester:{requester} error:{e}.")
        return Response(
            {"error": "error in get_analyzer_configs. Check logs."},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
Example #3
0
def get_analyzer_configs(request):
    """
    Return the uploaded analyzer configuration; handy when callers want to
    pick analyzers programmatically.

    :return: 200 if ok, 500 if failed
    """
    source = str(request.user)
    try:
        logger.info(
            "get_analyzer_configs received request from {}".format(source))
        analyzers_config = utilities.get_analyzer_config()
        return Response(analyzers_config, status=status.HTTP_200_OK)
    except Exception as e:
        # boundary handler: record the traceback, return an opaque 500 payload
        logger.exception("get_analyzer_configs requester:{} error:{}".format(
            source, e))
        return Response({"error": "error in get_analyzer_configs. Check logs"},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
Example #4
0
 def test_config(self):
     """The analyzer configuration must load and be non-empty."""
     analyzer_config = get_analyzer_config()
     self.assertNotEqual(analyzer_config, {})
Example #5
0
def send_analysis_request(request):
    """
    This API allows to start a Job related to a file or an observable

    data_received parameters:
    :parameter: is_sample: is a sample (file) or an observable (domain, ip, ...)
    :parameter: md5: string, md5 of the item to analyze
    :parameter: [file]: binary, required if is_sample=True, the binary
    :parameter: [file_mimetype]: string, required if is_sample=True, the binary mimetype
    :parameter: [file_name]: string, optional if is_sample=True, the binary name
    :parameter: [observable_name]: string, required if is_sample=False, the observable value
    :parameter: [observable_classification]: string, required if is_sample=False, (domain, ip, ...)
    :parameter: [analyzers_requested]: list of requested analyzer to run, before filters
    :parameter: [run_all_available_analyzers]: bool, default False
    :parameter: [force_privacy]: boolean, default False, enable it if you want to avoid to run analyzers with privacy issues
    :parameter: [disable_external_analyzers]: boolean, default False, enable it if you want to exclude external analyzers
    :parameter: [test]: disable analysis for API testing

    :return: 202 if accepted, 500 if failed

    NOTE(review): despite the docstring, the success path below returns
    HTTP_200_OK, not 202 — confirm which status API clients expect.
    Validation failures return 400 with numeric error codes
    (810-816, presumably project-defined identifiers — verify against docs).
    """
    source = str(request.user)
    # filled in-place by utilities.filter_analyzers and echoed back to the client
    warnings = []
    try:
        data_received = request.data
        logger.info(
            "send_analysis_request received request from {}. Data:{}".format(
                source, dict(data_received)))

        # when 'test' is truthy, the job is created but analyzers are not started
        test = data_received.get('test', False)

        # extra fields to persist on the Job beyond the serializer's own data
        params = {'source': source}

        serializer = serializers.JobSerializer(data=data_received)
        if serializer.is_valid():
            serialized_data = serializer.validated_data
            logger.info("serialized_data: {}".format(serialized_data))

            # some values are mandatory only in certain cases
            if serialized_data['is_sample']:
                if 'file' not in data_received:
                    return Response({"error": "810"},
                                    status=status.HTTP_400_BAD_REQUEST)
                if 'file_mimetype' not in data_received:
                    return Response({"error": "811"},
                                    status=status.HTTP_400_BAD_REQUEST)
            else:
                if 'observable_name' not in data_received:
                    return Response({"error": "812"},
                                    status=status.HTTP_400_BAD_REQUEST)
                if 'observable_classification' not in data_received:
                    return Response({"error": "813"},
                                    status=status.HTTP_400_BAD_REQUEST)

            # we need to clean the list of requested analyzers, based on configuration data
            analyzers_config = utilities.get_analyzer_config()
            run_all_available_analyzers = serialized_data[
                'run_all_available_analyzers']
            analyzers_requested = serialized_data.get('analyzers_requested',
                                                      [])
            if run_all_available_analyzers:
                # the two selection mechanisms are mutually exclusive
                if analyzers_requested:
                    logger.info(
                        "either you specify a list of requested analyzers or the"
                        " 'run_all_available_analyzers' parameter, not both")
                    return Response({"error": "816"},
                                    status=status.HTTP_400_BAD_REQUEST)
                # just pick all available analyzers
                analyzers_requested = [
                    analyzer_name for analyzer_name in analyzers_config
                ]
            # drops analyzers not runnable for this sample/observable;
            # appends human-readable reasons into `warnings`
            cleaned_analyzer_list = utilities.filter_analyzers(
                serialized_data,
                analyzers_requested,
                analyzers_config,
                warnings,
                run_all=run_all_available_analyzers)
            params['analyzers_to_execute'] = cleaned_analyzer_list
            if len(cleaned_analyzer_list) < 1:
                logger.info(
                    "after the filter, no analyzers can be run. Try with other analyzers"
                )
                return Response({"error": "814"},
                                status=status.HTTP_400_BAD_REQUEST)

            # save the arrived data plus new params into a new job object
            serializer.save(**params)
            job_id = serializer.data.get('id', '')
            md5 = serializer.data.get('md5', '')
            logger.info("new job_id {} for md5 {}".format(job_id, md5))
            if not job_id:
                # the job failed to persist; nothing to run
                return Response({"error": "815"},
                                status=status.HTTP_400_BAD_REQUEST)

        else:
            error_message = "serializer validation failed: {}".format(
                serializer.errors)
            logger.info(error_message)
            return Response(error_message, status=status.HTTP_400_BAD_REQUEST)

        is_sample = serializer.data.get('is_sample', '')
        if not test:
            # kick off the actual analysis for the freshly created job
            general.start_analyzers(params['analyzers_to_execute'],
                                    analyzers_config, job_id, md5, is_sample)

        response_dict = {
            "status": "accepted",
            "job_id": job_id,
            "warnings": warnings,
            "analyzers_running": cleaned_analyzer_list
        }

        logger.debug(response_dict)

        # NOTE(review): docstring advertises 202 but this is a 200
        return Response(response_dict, status=status.HTTP_200_OK)

    except Exception as e:
        logger.exception(
            "receive_analysis_request requester:{} error:{}".format(source, e))
        return Response(
            {"error": "error in send_analysis_request. Check logs"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
Example #6
0
def send_analysis_request(request):
    """
    This endpoint allows to start a Job related to a file or an observable

    :param is_sample: bool
        is a sample (file) or an observable (domain, ip, ...)
    :param md5: string
        md5 of the item to analyze
    :param [file]: binary
        required if is_sample=True, the binary
    :param [file_mimetype]: string
        optional, the binary mimetype, calculated by default
    :param [file_name]: string
        optional if is_sample=True, the binary name
    :param [observable_name]: string
        required if is_sample=False, the observable value
    :param [observable_classification]: string
        required if is_sample=False, (domain, ip, ...)
    :param [analyzers_requested]: list
        list of requested analyzer to run, before filters
    :param [tags_id]: list<int>
        list of id's of tags to apply to job
    :param [run_all_available_analyzers]: bool
        default False
    :param [force_privacy]: bool
        default False,
        enable it if you want to avoid to run analyzers with privacy issues
    :param [disable_external_analyzers]: bool
        default False,
        enable it if you want to exclude external analyzers
    :param [test]: bool
        disable analysis for API testing

    :return 202:
        if accepted
    :return 500:
        if failed

    NOTE(review): despite the ":return 202:" above, the success path below
    returns HTTP_200_OK — confirm which status API clients expect.
    Validation failures return 400 with numeric error codes
    (810-816, presumably project-defined identifiers — verify against docs).
    """
    source = str(request.user)
    # filled in-place by utilities.filter_analyzers and echoed back to the client
    warnings = []
    try:
        data_received = request.data
        logger.info(
            f"send_analysis_request received request from {source}."
            f"Data:{dict(data_received)}."
        )

        # when 'test' is truthy, the job is created but analyzers are not started
        test = data_received.get("test", False)

        # extra fields to persist on the Job beyond the serializer's own data
        params = {"source": source}

        serializer = serializers.JobSerializer(data=data_received)
        if serializer.is_valid():
            serialized_data = serializer.validated_data
            logger.info(f"serialized_data: {serialized_data}")

            # some values are mandatory only in certain cases
            if serialized_data["is_sample"]:
                if "file" not in data_received:
                    return Response(
                        {"error": "810"}, status=status.HTTP_400_BAD_REQUEST
                    )
                if "file_mimetype" not in data_received:
                    # unlike older versions, a missing mimetype is not an
                    # error: it is derived from the file content/name
                    serialized_data["file_mimetype"] = utilities.calculate_mimetype(
                        data_received["file"], data_received.get("file_name", "")
                    )
            else:
                if "observable_name" not in data_received:
                    return Response(
                        {"error": "812"}, status=status.HTTP_400_BAD_REQUEST
                    )
                if "observable_classification" not in data_received:
                    return Response(
                        {"error": "813"}, status=status.HTTP_400_BAD_REQUEST
                    )

            # we need to clean the list of requested analyzers,
            # ... based on configuration data
            analyzers_config = utilities.get_analyzer_config()
            run_all_available_analyzers = serialized_data.get(
                "run_all_available_analyzers", False
            )
            analyzers_requested = serialized_data.get("analyzers_requested", [])
            if run_all_available_analyzers:
                # the two selection mechanisms are mutually exclusive
                if analyzers_requested:
                    logger.info(
                        """either you specify a list of requested analyzers or the
                         'run_all_available_analyzers' parameter, not both"""
                    )
                    return Response(
                        {"error": "816"}, status=status.HTTP_400_BAD_REQUEST
                    )
                # just pick all available analyzers
                analyzers_requested = [
                    analyzer_name for analyzer_name in analyzers_config
                ]
            # drops analyzers not runnable for this sample/observable;
            # appends human-readable reasons into `warnings`
            cleaned_analyzer_list = utilities.filter_analyzers(
                serialized_data,
                analyzers_requested,
                analyzers_config,
                warnings,
                run_all=run_all_available_analyzers,
            )
            params["analyzers_to_execute"] = cleaned_analyzer_list
            if len(cleaned_analyzer_list) < 1:
                logger.info(
                    """after the filter, no analyzers can be run.
                     Try with other analyzers"""
                )
                return Response({"error": "814"}, status=status.HTTP_400_BAD_REQUEST)

            # save the arrived data plus new params into a new job object
            serializer.save(**params)
            job_id = serializer.data.get("id", "")
            md5 = serializer.data.get("md5", "")
            logger.info(f"new job_id {job_id} for md5 {md5}")
            if not job_id:
                # the job failed to persist; nothing to run
                return Response({"error": "815"}, status=status.HTTP_400_BAD_REQUEST)

        else:
            error_message = f"serializer validation failed: {serializer.errors}"
            logger.info(error_message)
            return Response(
                {"error": error_message}, status=status.HTTP_400_BAD_REQUEST
            )

        is_sample = serializer.data.get("is_sample", "")
        if not test:
            # kick off the actual analysis for the freshly created job
            general.start_analyzers(
                params["analyzers_to_execute"], analyzers_config, job_id, md5, is_sample
            )

        response_dict = {
            "status": "accepted",
            "job_id": job_id,
            "warnings": warnings,
            "analyzers_running": cleaned_analyzer_list,
        }

        logger.debug(response_dict)

        # NOTE(review): docstring advertises 202 but this is a 200
        return Response(response_dict, status=status.HTTP_200_OK)

    except Exception as e:
        logger.exception(f"receive_analysis_request requester:{source} error:{e}.")
        return Response(
            {"error": "error in send_analysis_request. Check logs"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )