Example #1
    def __process_task(self, task: AgentTask) -> None:
        """Dispatches and executes the next incoming task.

        The high-level workflow looks like this: for every new `search` job,
        mquery creates a new `search` task for every agent group.
        One of the agents will pick it up, execute it, and create `yara`
        tasks. `yara` tasks will be executed by workers for every file in
        the iterator, until it is exhausted.

        :param task: Task to be executed.
        :type task: AgentTask
        :raises RuntimeError: Task with unsupported type given.
        """
        if task.type == TaskType.RELOAD:
            if (self.plugin_config_version ==
                    self.db.get_plugin_config_version()):
                # This should never happen and suggests that there is a bug
                # somewhere and the version was not updated properly.
                logging.error(
                    "Critical error: Requested to reload configuration, but "
                    "configuration present in database is still the same (%s).",
                    self.plugin_config_version,
                )
                return
            logging.info("Configuration changed - reloading plugins.")
            # Request next agent to reload the configuration
            self.db.reload_configuration(self.plugin_config_version)
            # Reload configuration. Version will be updated during reinitialization,
            # so we don't receive our own request.
            self.__initialize_agent()
        elif task.type == TaskType.COMMAND:
            logging.info("Executing raw command: %s", task.data)
            self.ursa.execute_command(task.data)
        elif task.type == TaskType.SEARCH:
            job = JobId(task.data)
            logging.info("search: %s", job.hash)

            try:
                self.__search_task(job)
            except Exception as e:
                logging.exception("Failed to execute task.")
                jobobj = self.db.get_job(job)
                self.db.agent_finish_job(jobobj.userid, job)
                self.db.fail_job(jobobj.userid, job, str(e))
        elif task.type == TaskType.YARA:
            data = json.loads(task.data)
            job = JobId(data["job"])
            iterator = data["iterator"]
            logging.info("yara: iterator %s", iterator)

            try:
                self.__yara_task(job, iterator)
            except Exception as e:
                logging.exception("Failed to execute task.")
                jobobj = self.db.get_job(job)
                self.db.agent_finish_job(jobobj.userid, job)
                self.db.fail_job(jobobj.userid, job, str(e))
        else:
            raise RuntimeError(f"Unsupported task type: {task.type}")
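
The docstring above describes a fan-out: each new search job becomes one `search` task per agent group, and the executing agent then emits `yara` tasks that reference a file iterator. A minimal, self-contained sketch of that shape (the `TaskType` names match the dispatcher above; the enum values, dataclass fields, and helper names are illustrative assumptions, not the project's actual definitions):

import json
from dataclasses import dataclass
from enum import Enum


class TaskType(Enum):
    RELOAD = "reload"
    COMMAND = "command"
    SEARCH = "search"
    YARA = "yara"


@dataclass
class AgentTask:
    type: TaskType
    data: str


def fan_out_search(job_id: str, agent_groups: list) -> list:
    # One `search` task per agent group - whichever agent picks a task
    # up will run the search and emit follow-up `yara` tasks.
    return [AgentTask(TaskType.SEARCH, job_id) for _ in agent_groups]


def make_yara_task(job_id: str, iterator: str) -> AgentTask:
    # `yara` tasks carry the job and the iterator as JSON, mirroring
    # the json.loads(task.data) in __process_task above.
    return AgentTask(
        TaskType.YARA, json.dumps({"job": job_id, "iterator": iterator})
    )
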
Example #2
def job_cancel(request: Request, job_id: str) -> StatusSchema:
    """
    Cancels the job with the provided `job_id`.
    """
    userid = request.client.host
    db.cancel_job(userid, JobId(job_id))
    return StatusSchema(status="ok")
Example #3
def matches(
    hash: str, offset: int = Query(...), limit: int = Query(...)
) -> MatchesSchema:
    """
    Returns a list of matched files, along with metadata tags and other
    useful information. Results from this query can be used to download files
    using the `/download` endpoint.
    """
    return db.get_job_matches(JobId(hash), offset, limit)
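
Since the endpoint paginates via `offset` and `limit`, a client typically loops until an empty page comes back. A hedged sketch using `requests` (the `/matches/{hash}` path, the base URL, and the `matches` key in the response body are assumptions inferred from this listing, not documented API details):

import requests

BASE_URL = "http://localhost:8000"  # assumed mquery API address

def iter_matches(job_hash: str, page_size: int = 50):
    # Walk the result set page by page until an empty batch signals the end.
    offset = 0
    while True:
        resp = requests.get(
            f"{BASE_URL}/matches/{job_hash}",
            params={"offset": offset, "limit": page_size},
        )
        resp.raise_for_status()
        batch = resp.json().get("matches", [])
        if not batch:
            break
        yield from batch
        offset += page_size
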
Example #4
def download(job_id: str, ordinal: int, file_path: str) -> FileResponse:
    """
    Sends a file from the given `file_path`. This path should come from
    the results of one of the previous searches.

    This endpoint needs the `job_id` of the job that found the specified
    file, and the `ordinal` (index of the file in that job's results), to
    ensure that users can't download arbitrary files (for example,
    "/etc/passwd").
    """
    if not db.job_contains(JobId(job_id), ordinal, file_path):
        raise NotFound("No such file in result set.")

    attach_name, ext = os.path.splitext(os.path.basename(file_path))
    return FileResponse(file_path, filename=attach_name + ext + "_")
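
The `db.job_contains` check is what enforces the guarantee described in the docstring: a path is only served if the given job really produced it at the given index. A hypothetical version of such a check (the real `db` layer is not shown in this listing):

def job_contains(job_matches: list, ordinal: int, file_path: str) -> bool:
    # The requested path must sit at exactly the claimed position in the
    # job's result list; any other path (e.g. "/etc/passwd") is rejected.
    return 0 <= ordinal < len(job_matches) and job_matches[ordinal] == file_path

The trailing `_` appended to the suggested filename presumably blunts the original extension, so a downloaded (potentially malicious) sample is not directly executable or auto-opened by the client's OS.
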
Example #5
async def download_files(hash: str) -> StreamingResponse:
    matches = db.get_job_matches(JobId(hash)).matches
    return StreamingResponse(zip_files(matches))
Example #6
def download_hashes(hash: str) -> Response:
    hashes = "\n".join(
        d["meta"]["sha256"]["display_text"]
        for d in db.get_job_matches(JobId(hash)).matches
    )
    return Response(hashes + "\n")
Example #7
def query_remove(job_id: str) -> StatusSchema:
    db.remove_query(JobId(job_id))
    return StatusSchema(status="ok")
Example #8
def job_cancel(job_id: str) -> StatusSchema:
    """
    Cancels the job with the provided `job_id`.
    """
    db.cancel_job(JobId(job_id))
    return StatusSchema(status="ok")
Example #9
def job_info(job_id: str) -> JobSchema:
    """
    Returns metadata for a single job. May be useful for monitoring
    job progress.
    """
    return db.get_job(JobId(job_id))
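
As the docstring hints, this endpoint is a natural fit for polling. A client-side sketch, assuming the route is `/job/{job_id}` and that the returned schema has a `status` field with terminal values like "done", "cancelled", or "failed" (all assumptions, not the documented schema):

import time
import requests

def wait_for_job(base_url: str, job_id: str, interval: float = 2.0) -> dict:
    # Poll the job metadata until it reaches a terminal state.
    while True:
        job = requests.get(f"{base_url}/job/{job_id}").json()
        if job.get("status") in ("done", "cancelled", "failed"):
            return job
        time.sleep(interval)
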
Example #10
async def download_files(request: Request, hash: str) -> StreamingResponse:
    userid = request.client.host
    matches = db.get_job_matches(JobId(hash)).matches
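    # Count every file in the archive against the user's download quota.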
    db.bump_quota(userid, RateLimitType.file_download, len(matches))
    return StreamingResponse(zip_files(matches))
Example #11
def query_remove(job_id: str) -> StatusSchema:
    job = db.get_job(JobId(job_id))
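    # Only remove the query when the job has a known owner.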
    if job.userid != "unknown":
        db.remove_query(JobId(job_id))
    return StatusSchema(status="ok")
Example #12
def download(job_id: str, ordinal: int, file_path: str) -> Any:
    if not db.job_contains(JobId(job_id), ordinal, file_path):
        raise NotFound("No such file in result set.")

    attach_name, ext = os.path.splitext(os.path.basename(file_path))
    return FileResponse(file_path, filename=attach_name + ext + "_")
Example #13
def job_cancel(job_id: str) -> StatusSchema:
    db.cancel_job(JobId(job_id))
    return StatusSchema(status="ok")
Example #14
def job_info(job_id: str) -> JobSchema:
    return db.get_job(JobId(job_id))
Example #15
def matches(
    hash: str, offset: int = Query(...), limit: int = Query(...)
) -> MatchesSchema:
    return db.get_job_matches(JobId(hash), offset, limit)