Example #1
File: engine.py Project: sk4la/plast
    def _dispatch_jobs(self):
        """
        .. py:function:: _dispatch_jobs(self)

        Dispatches the processing task(s) to the subprocess(es).

        :param self: current class instance
        :type self: class

        :return: number of match(es)
        :rtype: int
        """

        with multiprocessing.Manager() as manager:
            queue = manager.Queue()
            results = (multiprocessing.Lock(), multiprocessing.Value(ctypes.c_int, 0), manager.list())

            reader = multiprocessing.Process(target=_reader.Reader(queue, results, {
                "target": self.case.resources["matches"],
                "storage": self.case.resources["storage"],
                "format": self.case.arguments.format
            }).run)

            reader.daemon = True
            reader.start()

            _log.debug("Started reader subprocess to consume queue result(s).")

            with _magic.Pool(processes=self.case.arguments.processes) as pool:
                for file in self.case.resources["evidences"]:
                    if os.path.getsize(file) > self.case.arguments.max_size:
                        _log.warning("Evidence <{}> exceeds the maximum size. Ignoring evidence. Try changing --max-size to override this behavior.".format(file))
                        continue

                    pool.starmap_async(
                        _processors.File(self.case.arguments.hash_algorithms, self.case.arguments.callbacks, queue, self.case.arguments.fast).run, 
                        [(file, self.buffers)], 
                        error_callback=_log.inner_exception)

                    _log.debug("Mapped concurrent job to consume evidence <{}>.".format(file))

            queue.put(_codes.DONE)

            with _magic.Hole(KeyboardInterrupt, action=lambda: _log.fault("Aborted due to manual user interruption <SIGINT>.")):
                reader.join()

            return results[1].value
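
The skeleton above (a manager-backed queue, one daemon reader process, a pool of producers, and a sentinel that unblocks the reader) can be reproduced with the standard library alone. A minimal sketch, where read, scan and DONE are stand-ins for _reader.Reader.run, _processors.File.run and _codes.DONE:

import multiprocessing

DONE = None  # stand-in for the _codes.DONE sentinel

def read(queue, matches):
    # Stand-in for _reader.Reader.run: drain the queue into the shared
    # list until the sentinel arrives.
    while True:
        item = queue.get()
        if item is DONE:
            break
        matches.append(item)

def scan(queue, evidence):
    # Stand-in for _processors.File.run: pretend every evidence matches.
    queue.put("match:{}".format(evidence))

if __name__ == "__main__":
    with multiprocessing.Manager() as manager:
        queue = manager.Queue()
        matches = manager.list()  # mirrors the manager.list() held in results above

        reader = multiprocessing.Process(target=read, args=(queue, matches))
        reader.daemon = True
        reader.start()

        pool = multiprocessing.Pool(processes=2)
        pool.starmap_async(scan, [(queue, name) for name in ("a.bin", "b.bin")])
        pool.close()
        pool.join()

        queue.put(DONE)  # unblock the reader once every job has finished
        reader.join()

        print(list(matches))  # two "match:..." entries; order may vary

Note that the sentinel is enqueued only after the pool block has completed; putting it on the queue earlier could let the reader exit while producers are still writing.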
Example #2
def _initialize(container):
    """
    .. py:function:: _initialize(container)

    Local entry point for the program.

    :param container: tuple containing the loaded module(s) and processed command-line argument(s)
    :type container: tuple
    """

    del container[1]._dummy

    modules = container[0]
    args = container[1]

    _log.set_console_level(args.logging.upper())

    if not _checker.number_rulesets():
        _log.fault("No YARA rulesets found. Nothing to be done.")

    if args.no_prompt:
        _conf.DEFAULTS["NO_PROMPT"] = True

    case = _case.Case(args)
    case._create_arborescence()

    if _conf.CASE_WIDE_LOGGING:
        _log._create_file_logger("case",
                                 os.path.join(
                                     case.resources["case"],
                                     "{}.log".format(_meta.__package__)),
                                 level=_conf.CASE_WIDE_LOGGING_LEVEL,
                                 encoding=_conf.OUTPUT_CHARACTER_ENCODING)

    feed = _fs.expand_files(args.input,
                            recursive=args.recursive,
                            include=args.include,
                            exclude=args.exclude)

    if not feed:
        _log.fault("No evidence(s) to process. Quitting.")

    if args._subparser:
        Module = container[0][args._subparser]
        Module.case = case
        Module.feed = feed

        with _magic.Hole(
                Exception,
                action=lambda: _log.fault(
                    "Fatal exception raised within preprocessing module <{}>.".
                    format(args._subparser),
                    post_mortem=True)), _magic.Invocator(Module):
            Module.run()

        del Module

    else:
        _log.debug("Guessing data type(s).")
        _dispatch_preprocessing(modules, case, feed)

    if not case.resources["evidences"]:
        _log.fault("No evidence(s) to process. Quitting.")
Example #3
def _argparser(parser):
    """
    .. py:function:: _argparser(parser)

    Command-line argument parsing function.

    :param parser: :code:`argparse.ArgumentParser` instance
    :type parser: class

    :return: tuple containing the loaded module(s) and the processed command-line argument(s)
    :rtype: tuple
    """

    modules = {}

    parser.add_argument("-i",
                        "--input",
                        nargs="+",
                        action=_parser.AbsolutePathMultiple,
                        required=True,
                        metavar="PATH",
                        help="input file(s) or directory(ies)")

    parser.add_argument(
        "-o",
        "--output",
        required=True,
        action=_parser.AbsolutePath,
        metavar="PATH",
        help="path to the output directory to be created for the current case")

    parser.add_argument(
        "--callbacks",
        nargs="*",
        choices=_loader.render_modules(_callback, _models.Callback),
        default=(_loader.render_modules(_callback, _models.Callback)
                 if _conf.INVOKE_ALL_MODULES_IF_NONE_SPECIFIED else []),
        action=_parser.Unique,
        help="select the callback(s) that will handle the resulting data [*]")

    parser.add_argument(
        "--exclude",
        nargs="+",
        default=_conf.DEFAULTS["EXCLUSION_FILTERS"],
        metavar="FILTER",
        help="override include and ignore file(s) matching wildcard filter(s) {}"
        .format(_conf.DEFAULTS["EXCLUSION_FILTERS"]))

    parser.add_argument("--fast",
                        action="store_true",
                        default=_conf.DEFAULTS["YARA_FAST_MODE"],
                        help="enable YARA's fast matching mode")

    parser.add_argument("--format",
                        choices=["json"],
                        default=_conf.DEFAULTS["OUTPUT_FORMAT"].lower(),
                        help="output format for detection(s) {}".format(
                            _conf.DEFAULTS["OUTPUT_FORMAT"].lower()))

    parser.add_argument(
        "--hash-algorithms",
        nargs="+",
        action=_parser.Unique,
        metavar="NAME",
        choices=[
            "md5", "sha1", "sha224", "sha256", "sha384", "sha512", "blake2b",
            "blake2s", "sha3_224", "sha3_256", "sha3_384", "sha3_512"
        ],
        default=[item.lower() for item in _conf.DEFAULTS["HASH_ALGORITHMS"]],
        help=
        "output format for detection(s), see hashlib API reference for supported algorithm(s) {}"
        .format([item.lower() for item in _conf.DEFAULTS["HASH_ALGORITHMS"]]))

    parser.add_argument("--ignore-warnings",
                        action="store_true",
                        default=_conf.YARA_ERROR_ON_WARNING,
                        help="ignore YARA compilation warning(s)")

    parser.add_argument(
        "--include",
        nargs="+",
        default=_conf.DEFAULTS["INCLUSION_FILTERS"],
        metavar="FILTER",
        help="only add file(s) matching wildcard filter(s) {}".format(
            _conf.DEFAULTS["INCLUSION_FILTERS"]))

    parser.add_argument(
        "--logging",
        choices=["debug", "info", "warning", "error", "critical", "suppress"],
        default=_conf.DEFAULTS["LOGGING_LEVEL"].lower(),
        help="override the default console logging level [{}]".format(
            _conf.DEFAULTS["LOGGING_LEVEL"].lower()))

    parser.add_argument("--max-size",
                        type=int,
                        default=300000000,
                        metavar="BYTES",
                        help="maximum size for the evidence(s) [300MB]")

    parser.add_argument("--no-prompt",
                        action="store_true",
                        default=_conf.DEFAULTS["NO_PROMPT"],
                        help="always use default answer when prompted")

    parser.add_argument(
        "--overwrite",
        action="store_true",
        help="force the overwriting of an existing output directory")

    parser.add_argument(
        "--post",
        nargs="*",
        choices=_loader.render_modules(_post, _models.Post),
        default=(_loader.render_modules(_post, _models.Post)
                 if _conf.INVOKE_ALL_MODULES_IF_NONE_SPECIFIED else []),
        action=_parser.Unique,
        help=
        "select the postprocessing module(s) that will handle the resulting data [*]"
    )

    parser.add_argument(
        "--processes",
        type=int,
        choices=range(1, 1001),
        default=(multiprocessing.cpu_count()
                 or _conf.DEFAULTS["PROCESSES_FALLBACK"]),
        metavar="NUMBER",
        help="override the number of concurrent processe(s) [{}]".format(
            multiprocessing.cpu_count() or
            (_conf.DEFAULTS["PROCESSES_FALLBACK"] if
             _conf.DEFAULTS["PROCESSES_FALLBACK"] in range(1, 1001) else 4)))

    parser.add_argument("-r",
                        "--recursive",
                        action="store_true",
                        help="walk through directory(ies) recursively")

    parser.add_argument("-",
                        dest="_dummy",
                        action="store_true",
                        help=argparse.SUPPRESS)

    for name, Module in _loader.iterate_modules(_pre, _models.Pre):
        subparser = parser.subparsers.add_parser(name,
                                                 description=getattr(
                                                     Module, "__description__",
                                                     None),
                                                 add_help=False)

        modules[name] = Module(subparser)
        modules[name].__name__ = name

        with _magic.Hole(argparse.ArgumentError):
            parser.register_help_hook(subparser)

            if getattr(modules[name], "__version__", None):
                parser.register_version(subparser, modules[name].__name__,
                                        modules[name].__version__)

    return modules, parser.parse_args()
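
The action=_parser.AbsolutePath and action=_parser.Unique arguments refer to custom argparse actions defined elsewhere in the project (and parser.subparsers shows that the parser received here is a project wrapper rather than a bare argparse.ArgumentParser). Minimal stand-ins consistent with how they are registered above might look like this; both class bodies are guesses, not plast's actual code:

import argparse
import os

class AbsolutePath(argparse.Action):
    # Plausible stand-in for _parser.AbsolutePath: normalize a single
    # path argument to its absolute form before storing it.
    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, os.path.abspath(values))

class Unique(argparse.Action):
    # Plausible stand-in for _parser.Unique: drop duplicate values while
    # preserving the order in which they were supplied.
    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, list(dict.fromkeys(values)))

parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output", action=AbsolutePath, metavar="PATH")
parser.add_argument("--post", nargs="*", action=Unique)
args = parser.parse_args(["-o", "case", "--post", "a", "b", "a"])
# args.output is the absolute form of "case"; args.post == ["a", "b"]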
Example #4
            _log.warning(
                "Data type <{}> unsupported. Added evidence <{}> to the force-feeding list."
                .format(meta.mime, file))

    if tasks:
        for (name, Module), partial_feed in tasks.items():
            if _interaction.prompt(
                    "Found <{}> evidence(s) that can be dispatched. Do you want to automatically invoke the <{}> module using default option(s)?"
                    .format(len(partial_feed), name),
                    default_state=True):
                Module.case = case
                Module.feed = partial_feed

                with _magic.Hole(
                        Exception,
                        action=lambda: _log.fault(
                            "Fatal exception raised within preprocessing module <{}>."
                            .format(name),
                            post_mortem=True)), _magic.Invocator(Module):
                    Module.run()

                del Module


def _argparser(parser):
    """
    .. py:function:: _argparser(parser)

    Command-line argument parsing function.

    :param parser: :code:`argparse.ArgumentParser` instance
    :type parser: class
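
The dispatch loop above gates each preprocessing module behind _interaction.prompt(message, default_state=True). Judging from that call, it is a yes/no console prompt that falls back to a default answer (presumably the path also taken when --no-prompt is set); a rough stand-in:

def prompt(message, default_state=True):
    # Illustrative guess at _interaction.prompt: ask a yes/no question,
    # returning the default answer on empty input.
    hint = "Y/n" if default_state else "y/N"
    answer = input("{} [{}] ".format(message, hint)).strip().lower()
    if not answer:
        return default_state
    return answer in ("y", "yes")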
Example #5
    def _dispatch_jobs(self):
        """
        .. py:function:: _dispatch_jobs(self)

        Dispatches the processing task(s) to the subprocess(es).

        :param self: current class instance
        :type self: class

        :return: number of match(es)
        :rtype: int
        """

        with multiprocessing.Manager() as manager:
            queue = manager.Queue()
            results = (multiprocessing.Lock(),
                       multiprocessing.Value(ctypes.c_int, 0))

            reader = multiprocessing.Process(target=_reader.Reader(
                queue, results, {
                    "target": self.case.resources["matches"],
                    "format": self.case.arguments.format
                }).run)

            reader.daemon = True
            reader.start()

            _log.debug("Started reader subprocess to process queue result(s).")

            with _magic.Pool(processes=self.case.arguments.processes) as pool:
                for file in self.case.resources["evidences"]["files"]:
                    pool.starmap_async(_processors.File(
                        self.case.arguments.hash_algorithms,
                        self.case.arguments.callbacks, queue,
                        self.case.arguments.fast).run, [(file, self.buffers)],
                                       error_callback=_log.inner_exception)

                    _log.debug(
                        "Mapped concurrent job to process evidence <{}>.".
                        format(file))

                for process in self.case.resources["evidences"]["processes"]:
                    pool.starmap_async(_processors.Process(
                        self.case.arguments.callbacks, queue,
                        self.case.arguments.fast).run,
                                       [(process, self.buffers)],
                                       error_callback=_log.inner_exception)

                    _log.debug(
                        "Mapped concurrent job to process live process matching PID <{}>."
                        .format(process))

            queue.put(_codes.DONE)

            with _magic.Hole(
                    KeyboardInterrupt,
                    action=lambda: _log.fault(
                        "Aborted due to manual user interruption <SIGINT>.")):
                reader.join()

            return results[1].value
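
Both versions of _dispatch_jobs hand the reader a (Lock, Value) pair and finally return results[1].value, the shared ctypes.c_int match counter. A minimal sketch of how such a counter is updated safely across processes (the tally helper is hypothetical; plast's actual accounting lives in _reader.Reader):

import ctypes
import multiprocessing

def tally(lock, counter):
    # Hypothetical helper: increment the shared counter under the
    # explicit lock, matching the (Lock, Value) pairing above.
    with lock:
        counter.value += 1

if __name__ == "__main__":
    lock = multiprocessing.Lock()
    counter = multiprocessing.Value(ctypes.c_int, 0)

    workers = [multiprocessing.Process(target=tally, args=(lock, counter))
               for _ in range(4)]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

    print(counter.value)  # -> 4

multiprocessing.Value also carries its own lock (counter.get_lock()), which would work equally well here; an explicit Lock, as in example #1 where results also holds a manager.list(), lets the reader guard the counter and other shared state under a single lock.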