Example #1
    def setup(self) -> None:
        client = get_docker_client()

        if not any(i for i in client.images.list()
                   if self.DOCKER_IMAGE in i.tags):
            client.images.pull(self.DOCKER_IMAGE)
            logging.info(f"Retrieved {self.TOOL_ID} Container")
Example #2
def _should_pull_analyzer(analyzer: SpecifiedAnalyzer) -> bool:
    """
        Returns True if the Docker image for the analyzer is not
        available locally. Always returns False if the analyzer is a base
        analyzer (exists in SPECIAL_ANALYZERS).
    """

    if analyzer.versioned_analyzer.name in SPECIAL_ANALYZERS:
        return False

    client = get_docker_client()
    image_id = analyzer.versioned_analyzer.image_id
    return not any(i for i in client.images.list() if image_id in i.tags)
Example #3
def prepull_analyzers(analyzer_name: str, version: Version) -> None:
    """
        Pulls every analyzer needed to run the specified analyzer (i.e. its dependencies).
    """

    specified_analyzer = SpecifiedAnalyzer(
        VersionedAnalyzer(AnalyzerName(analyzer_name), version))
    registry = RegistryData.from_json(REGISTRY)

    deps = registry.sorted_deps(specified_analyzer)
    client = get_docker_client()
    for dep in deps:
        if _should_pull_analyzer(dep):
            client.images.pull(dep.versioned_analyzer.image_id)
Example #4
    def run(self, files: Iterable[str]) -> JsonR:
        targets = [str(PurePath(p).relative_to(self.base_path)) for p in files]

        cmd = [
            "--config=https://r2c.dev/default-r2c-checks",
            "--json",
            "--skip-pattern-validation",
            *targets,
        ]

        client = get_docker_client()

        vols = {str(self.base_path): {"bind": "/home/repo", "mode": "ro"}}
        output_raw = client.containers.run(self.DOCKER_IMAGE,
                                           auto_remove=True,
                                           volumes=vols,
                                           command=cmd)
        output_str = output_raw.decode("utf-8").strip()
        output = json.loads(output_str)
        return output.get("results", [])
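Because containers.run() is called here without detach=True, it blocks until the container exits and returns the container's stdout as bytes, which is then decoded and parsed as JSON. Mounting the repository read-only at /home/repo keeps the containerized tool from modifying the code it scans, and auto_remove=True lets Docker delete the container as soon as it finishes.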
Example #5
    def ensure_daemon_running(self) -> str:
        client = get_docker_client()

        image_id = (f"{self.ANALYZER_NAME}:v{self.ANALYZER_VERSION}"
                    )  # note the v in front of the version
        running_containers = client.containers.list(filters={
            "name": self.CONTAINER_NAME,
            "status": "running"
        })
        if not running_containers:
            container = client.containers.run(
                image_id,
                command="/dev/fd/0",
                tty=True,
                name=self.CONTAINER_NAME,
                auto_remove=True,
                detach=True,
            )
            logging.info(f"started container with id: {container.id}")
            return container.id
        else:
            return running_containers[0].id
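The container started above is left running in the background; a hypothetical cleanup counterpart (not part of the original examples) could stop it once the analyzer is finished. Because auto_remove=True was passed, Docker removes the container as soon as it stops:

    def stop_daemon(self) -> None:
        # Hypothetical companion to ensure_daemon_running(); reuses CONTAINER_NAME from above.
        client = get_docker_client()
        for container in client.containers.list(filters={"name": self.CONTAINER_NAME}):
            container.stop()
            logging.info(f"stopped container with id: {container.id}")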
Example #6
def run_analyzer_on_local_code(
    analyzer_name: str,
    version: Version,
    base: Path,
    ignore_files: Set[Path],
    target_files: Iterable[str],
) -> JsonR:
    """Run an analyzer on a local folder.
    """
    get_docker_client()  # Ensures that docker is running

    specified_analyzer = SpecifiedAnalyzer(
        VersionedAnalyzer(AnalyzerName(analyzer_name), version))
    registry = RegistryData.from_json(REGISTRY)

    json_output_store = LocalJsonOutputStore()
    filesystem_output_store = LocalFilesystemOutputStore()
    log_store = LocalLogStore()
    stats_store = LocalStatsStore()

    # All caching should be handled by bento
    json_output_store.delete_all()  # type: ignore
    filesystem_output_store.delete_all()  # type: ignore
    log_store.delete_all()  # type: ignore
    stats_store.delete_all()  # type: ignore

    pathlib.Path(LOCAL_RUN_TMP_FOLDER).mkdir(parents=True, exist_ok=True)

    analyzer = Analyzer(
        registry,
        json_output_store,
        filesystem_output_store,
        log_store,
        stats_store,
        workdir=LOCAL_RUN_TMP_FOLDER,
        # Timeout relied on signaling, which is not valid in a multithreaded world.
        timeout=0,
        memory_limit=CONTAINER_MEMORY_LIMIT,
    )

    # Get all cloner versions from the registry so we can copy the passed-in code
    # directory in place of the cloner output for every version of the cloner.
    fetchers = [
        sa for sa in registry.sorted_deps(specified_analyzer)
        if sa.versioned_analyzer.name in SPECIAL_ANALYZERS
    ]

    analyzer_input = LocalCode(str(base))
    for fetcher in fetchers:
        _copy_local_input(
            analyzer,
            fetcher.versioned_analyzer,
            analyzer_input,
            ignore_files,
            set(target_files),
        )

    analyzer.full_analyze_request(
        analyzer_input=analyzer_input,
        specified_analyzer=specified_analyzer,
        force=False,
    )

    # Get the final output; fall back to an empty result list if nothing was produced,
    # since json.loads("") would raise a JSONDecodeError.
    output = json_output_store.read(analyzer_input, specified_analyzer)
    output_json = json.loads(output).get("results", []) if output else []

    # Cleanup state
    json_output_store.delete_all()  # type: ignore
    filesystem_output_store.delete_all()  # type: ignore
    log_store.delete_all()  # type: ignore
    stats_store.delete_all()  # type: ignore

    return output_json
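A hypothetical end-to-end invocation that ties prepull_analyzers (Example #3) and run_analyzer_on_local_code (Example #6) together; the analyzer name, version string, and paths are placeholders, and Version is assumed to accept a semantic-version string:

import json
from pathlib import Path

analyzer_name = "r2c/example-analyzer"      # placeholder analyzer name
version = Version("1.0.0")                  # assumes Version parses a semver string

prepull_analyzers(analyzer_name, version)   # pull any missing analyzer images first
results = run_analyzer_on_local_code(
    analyzer_name,
    version,
    base=Path("/path/to/repo"),             # placeholder local checkout
    ignore_files=set(),
    target_files=["src/app.py"],            # placeholder target file
)
print(json.dumps(results, indent=2))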