Ejemplo n.º 1
0
def analyze(ctx, target: List[str], async_flag: bool, mode: str,
            create_group: bool, group_id: str, group_name: str,
            min_severity: str, swc_blacklist: str, swc_whitelist: str,
            solc_version: str, solc_path: str, include: Tuple[str],
            remap_import: Tuple[str], check_properties: bool,
            enable_scribble: bool, scribble_path: str, scenario: str,
            project_id: str) -> None:
    """Analyze the given directory or arguments with MythX.

    \f

    :param ctx: Click context holding group-level parameters
    :param target: Arguments passed to the `analyze` subcommand
    :param async_flag: Whether to execute the analysis asynchronously
    :param mode: Full or quick analysis mode
    :param create_group: Create a new group for the analysis
    :param group_id: The group ID to add the analysis to
    :param group_name: The group name to attach to the analysis
    :param min_severity: Ignore SWC IDs below the designated level
    :param swc_blacklist: A comma-separated list of SWC IDs to ignore
    :param swc_whitelist: A comma-separated list of SWC IDs to include
    :param solc_version: The solc version to use for Solidity compilation
    :param solc_path: The path to a custom solc executable
    :param include: List of contract names to send - exclude everything else
    :param remap_import: List of import remappings to pass on to solc
    :param check_properties: Enable property verification mode
    :param enable_scribble: Enable instrumentation with scribble
    :param scribble_path: Optional path to the scribble executable
    :param scenario: Force an analysis scenario
    :param project_id: Id of project to add this analysis to
    :return:
    """

    # Merge CLI arguments with the yaml config: an explicitly passed CLI
    # value always wins over the config file, which in turn wins over the
    # hard-coded default.
    analyze_config = ctx.get("analyze")
    if async_flag is None:
        async_flag = analyze_config.get("async", False)
    if create_group is None:
        create_group = analyze_config.get("create-group", False)

    mode = mode or analyze_config.get("mode") or "quick"
    project_id = project_id or analyze_config.get("project-id") or None
    # NOTE: CLI argument takes precedence, consistent with all other options
    # (previously the config value incorrectly shadowed the CLI argument)
    group_id = group_id or analyze_config.get("group-id") or None
    group_name = group_name or analyze_config.get("group-name") or ""
    min_severity = min_severity or analyze_config.get("min-severity") or None
    swc_blacklist = swc_blacklist or analyze_config.get("blacklist") or None
    swc_whitelist = swc_whitelist or analyze_config.get("whitelist") or None
    solc_version = solc_version or analyze_config.get("solc") or None
    include = include or analyze_config.get("contracts") or []
    remap_import = remap_import or analyze_config.get("remappings") or []
    check_properties = (check_properties
                        or analyze_config.get("check-properties") or False)
    enable_scribble = enable_scribble or analyze_config.get(
        "enable-scribble") or False
    scribble_path = scribble_path or analyze_config.get(
        "scribble-path") or "scribble"
    target = target or analyze_config.get("targets") or None
    scenario = scenario or analyze_config.get("scenario") or None

    # project IDs can only be attached to newly created groups, so bail out
    # early on an inconsistent combination of flags
    if project_id and not create_group:
        LOGGER.debug("Only use project-id when create_group is enabled.")
        sys.exit(1)

    # enable property checking if explicitly requested or implicitly when
    # scribble instrumentation is requested
    ctx["client"].handler.middlewares.append(
        PropertyCheckingMiddleware(check_properties or enable_scribble))

    if create_group:
        resp: GroupCreationResponse = ctx["client"].create_group(
            group_name=group_name)
        group_id = resp.group.identifier
        group_name = resp.group.name or ""

    if group_id:
        # associate all following analyses to the passed or newly created group
        group_mw = GroupDataMiddleware(group_id=group_id,
                                       group_name=group_name)
        ctx["client"].handler.middlewares.append(group_mw)

    if project_id and group_id:
        resp: GroupOperationResponse = ctx["client"].add_group_to_project(
            group_id=group_id, project_id=project_id)
        # verify the server actually attached the group to the requested
        # project (was `not resp.project_id == project_id`)
        if resp.project_id != project_id:
            LOGGER.debug(
                f"Failed to add group to project with id {project_id}.")

    jobs: List[Dict[str, Any]] = []
    include = list(include)
    mode_list = determine_analysis_targets(target, forced_scenario=scenario)

    # build one or more analysis payloads per target, depending on the
    # detected (or forced) scenario
    for scenario, element in mode_list:
        if scenario == ScenarioMode.TRUFFLE:
            job = TruffleJob(element)
            job.generate_payloads(
                enable_scribble=enable_scribble,
                remappings=remap_import,
                scribble_path=scribble_path,
            )
            jobs.extend(job.payloads)
        elif scenario == ScenarioMode.SOLIDITY_DIR:
            # recursively enumerate sol files if not a truffle project
            LOGGER.debug(
                f"Identified {element} as directory containing Solidity files")
            jobs.extend(
                SolidityJob.walk_solidity_files(
                    solc_version=solc_version,
                    solc_path=solc_path,
                    base_path=element,
                    remappings=remap_import,
                    enable_scribble=enable_scribble,
                    scribble_path=scribble_path,
                ))
        elif scenario == ScenarioMode.SOLIDITY_FILE:
            LOGGER.debug(f"Trying to interpret {element} as a solidity file")
            # a target may carry a contract suffix: "path/file.sol:MyContract"
            target_split = element.split(":")
            file_path, contract = target_split[0], target_split[1:]
            if contract:
                include += contract  # e.g. ["MyContract"] or []
                contract = contract[0]
            job = SolidityJob(Path(file_path))
            job.generate_payloads(
                version=solc_version,
                solc_path=solc_path,
                contract=contract or None,
                remappings=remap_import,
                enable_scribble=enable_scribble,
                scribble_path=scribble_path,
            )
            jobs.extend(job.payloads)

    # reduce to whitelisted contract names
    if include:
        LOGGER.debug(
            f"Filtering {len(jobs)} job(s) for contracts to be included")
        found_contracts = {job["contract_name"] for job in jobs}
        overlap = set(include).difference(found_contracts)
        if overlap:
            raise click.UsageError(
                f"The following contracts could not be found: {', '.join(overlap)}"
            )
        jobs = [job for job in jobs if job["contract_name"] in include]

    # filter jobs where no bytecode was produced
    LOGGER.debug(f"Filtering {len(jobs)} job(s) for empty bytecode")
    jobs = [job for job in jobs if is_valid_job(job)]

    # sanitize local paths
    LOGGER.debug(f"Sanitizing {len(jobs)} jobs")
    jobs = [sanitize_paths(job) for job in jobs]

    LOGGER.debug(f"Submitting {len(jobs)} analysis jobs to the MythX API")

    if not jobs:
        raise click.UsageError(
            ("No jobs were generated. Please make sure your Solidity files "
             "compile correctly or your Truffle project has been compiled."))

    # "--yes" skips the interactive confirmation prompt
    consent = ctx["yes"] or click.confirm(f"Found {len(jobs)} job(s). Submit?")
    if not consent:
        LOGGER.debug("User consent not given - exiting")
        sys.exit(0)

    uuids = []
    with click.progressbar(jobs) as bar:
        for job in bar:
            # attach execution mode, submit, poll
            job.update({"analysis_mode": mode})
            resp = ctx["client"].analyze(**job)
            uuids.append(resp.uuid)

    if async_flag:
        LOGGER.debug(
            f"Asynchronous submission enabled - printing {len(uuids)} UUIDs and exiting"
        )
        write_or_print("\n".join(uuids))
        return

    issues_list: List[Tuple[DetectedIssuesResponse,
                            Optional[AnalysisInputResponse]]] = []
    formatter: BaseFormatter = FORMAT_RESOLVER[ctx["fmt"]]
    for uuid in uuids:
        while not ctx["client"].analysis_ready(uuid):
            # TODO: Add poll interval option
            LOGGER.debug(f"Analysis {uuid} not ready yet - waiting")
            time.sleep(3)
        LOGGER.debug(f"{uuid}: Fetching report")
        resp: DetectedIssuesResponse = ctx["client"].report(uuid)
        LOGGER.debug(f"{uuid}: Fetching input")
        # only fetch the (potentially large) analysis input if the chosen
        # formatter actually needs it
        inp: Optional[AnalysisInputResponse] = ctx["client"].request_by_uuid(
            uuid) if formatter.report_requires_input else None

        LOGGER.debug(f"{uuid}: Applying SWC filters")
        util.filter_report(
            resp,
            min_severity=min_severity,
            swc_blacklist=swc_blacklist,
            swc_whitelist=swc_whitelist,
        )
        # extend response with job UUID to keep formatter logic isolated
        resp.uuid = uuid
        issues_list.append((resp, inp))

    LOGGER.debug(
        f"Printing report for {len(issues_list)} issue items with sort key \"{ctx['table_sort_key']}\""
    )
    write_or_print(
        formatter.format_detected_issues(issues_list,
                                         table_sort_key=ctx["table_sort_key"]))
    sys.exit(ctx["retval"])
Ejemplo n.º 2
0
def analyze(
    ctx,
    target,
    async_flag,
    mode,
    create_group,
    group_id,
    group_name,
    min_severity,
    swc_blacklist,
    swc_whitelist,
    solc_version,
):
    """Analyze the given directory or arguments with MythX.
    \f

    :param ctx: Click context holding group-level parameters
    :param target: Arguments passed to the `analyze` subcommand
    :param async_flag: Whether to execute the analysis asynchronously
    :param mode: Full or quick analysis mode
    :param create_group: Create a new group for the analysis
    :param group_id: The group ID to add the analysis to
    :param group_name: The group name to attach to the analysis
    :param min_severity: Ignore SWC IDs below the designated level
    :param swc_blacklist: A comma-separated list of SWC IDs to ignore
    :param swc_whitelist: A comma-separated list of SWC IDs to include
    :param solc_version: The solc version to use for Solidity compilation
    :return:
    """

    group_name = group_name or ""
    if create_group:
        # open a new group and adopt its server-assigned id and name
        creation_resp: GroupCreationResponse = ctx["client"].create_group(
            group_name=group_name)
        group_id = creation_resp.group.identifier
        group_name = creation_resp.group.name or ""

    if group_id:
        # associate all following analyses to the passed or newly created group
        ctx["client"].handler.middlewares.append(
            GroupDataMiddleware(group_id=group_id, group_name=group_name))

    jobs = []

    if target:
        # explicit targets: each one is bytecode, a .sol file, or a directory
        for target_elem in target:
            elem_path = Path(target_elem)
            if target_elem.startswith("0x"):
                LOGGER.debug(
                    "Identified target {} as bytecode".format(target_elem))
                jobs.append(generate_bytecode_payload(target_elem))
            elif elem_path.is_file() and elem_path.suffix == ".sol":
                LOGGER.debug(
                    "Trying to interpret {} as a solidity file".format(
                        target_elem))
                jobs.append(
                    generate_solidity_payload(target_elem, solc_version))
            elif elem_path.is_dir():
                files = find_truffle_artifacts(elem_path)
                if files:
                    # extract truffle artifacts if config found in target
                    jobs.extend(
                        generate_truffle_payload(file) for file in files)
                else:
                    # recursively enumerate sol files if not a truffle project
                    jobs.extend(
                        walk_solidity_files(ctx,
                                            solc_version,
                                            base_path=target_elem))
            else:
                raise click.exceptions.UsageError(
                    "Could not interpret argument {} as bytecode or Solidity file"
                    .format(target_elem))
    else:
        # no targets given: auto-detect a Truffle project or loose sol files
        if Path("truffle-config.js").exists() or Path("truffle.js").exists():
            files = find_truffle_artifacts(Path.cwd())
            if not files:
                raise click.exceptions.UsageError((
                    "Could not find any truffle artifacts. Are you in the project root? "
                    "Did you run truffle compile?"))
            LOGGER.debug("Detected Truffle project with files:\n{}".format(
                "\n".join(files)))
            jobs = [generate_truffle_payload(file) for file in files]
        elif list(glob("*.sol")):
            jobs = walk_solidity_files(ctx, solc_version)
        else:
            raise click.exceptions.UsageError(
                "No argument given and unable to detect Truffle project or Solidity files"
            )

    jobs = [sanitize_paths(job) for job in jobs]

    uuids = []
    with click.progressbar(jobs) as bar:
        for job in bar:
            # attach execution mode, submit, poll
            job.update({"analysis_mode": mode})
            submission = ctx["client"].analyze(**job)
            uuids.append(submission.uuid)

    if async_flag:
        # fire-and-forget: print the job UUIDs and return immediately
        write_or_print("\n".join(uuids))
        return

    for uuid in uuids:
        while not ctx["client"].analysis_ready(uuid):
            # TODO: Add poll interval option
            time.sleep(3)
        report: DetectedIssuesResponse = ctx["client"].report(uuid)
        analysis_input = ctx["client"].request_by_uuid(uuid)

        # drop issues outside the requested severity / SWC constraints
        util.filter_report(report,
                           min_severity=min_severity,
                           swc_blacklist=swc_blacklist,
                           swc_whitelist=swc_whitelist)
        ctx["uuid"] = uuid
        write_or_print(FORMAT_RESOLVER[ctx["fmt"]].format_detected_issues(
            report, analysis_input))

    sys.exit(ctx["retval"])
Ejemplo n.º 3
0
def analyze(
    ctx,
    target: List[str],
    async_flag: bool,
    mode: str,
    create_group: bool,
    group_id: str,
    group_name: str,
    min_severity: str,
    swc_blacklist: str,
    swc_whitelist: str,
    solc_version: str,
    include: Tuple[str],
    remap_import: Tuple[str],
) -> None:
    """Analyze the given directory or arguments with MythX.

    \f

    :param ctx: Click context holding group-level parameters
    :param target: Arguments passed to the `analyze` subcommand
    :param async_flag: Whether to execute the analysis asynchronously
    :param mode: Full or quick analysis mode
    :param create_group: Create a new group for the analysis
    :param group_id: The group ID to add the analysis to
    :param group_name: The group name to attach to the analysis
    :param min_severity: Ignore SWC IDs below the designated level
    :param swc_blacklist: A comma-separated list of SWC IDs to ignore
    :param swc_whitelist: A comma-separated list of SWC IDs to include
    :param solc_version: The solc version to use for Solidity compilation
    :param include: List of contract names to send - exclude everything else
    :param remap_import: List of import remappings to pass on to solc
    :return:
    """

    # apply yaml config values; config values override CLI arguments here
    analyze_config = ctx.get("analyze")
    if analyze_config is not None:
        LOGGER.debug("Detected additional yaml config keys - applying")
        config_async = analyze_config.get("async")
        async_flag = config_async if config_async is not None else async_flag
        mode = analyze_config.get("mode") or mode
        config_create_group = analyze_config.get("create-group")
        create_group = (config_create_group
                        if config_create_group is not None else create_group)
        group_id = analyze_config.get("group-id") or group_id
        group_name = analyze_config.get("group-name") or group_name
        min_severity = analyze_config.get("min-severity") or min_severity
        swc_blacklist = analyze_config.get("blacklist") or swc_blacklist
        swc_whitelist = analyze_config.get("whitelist") or swc_whitelist
        solc_version = analyze_config.get("solc") or solc_version
        include = analyze_config.get("contracts") or include
        remap_import = analyze_config.get("remappings") or remap_import
        target = analyze_config.get("targets") or target

    group_name = group_name or ""
    if create_group:
        resp: GroupCreationResponse = ctx["client"].create_group(
            group_name=group_name)
        group_id = resp.group.identifier
        group_name = resp.group.name or ""

    if group_id:
        # associate all following analyses to the passed or newly created group
        group_mw = GroupDataMiddleware(group_id=group_id,
                                       group_name=group_name)
        ctx["client"].handler.middlewares.append(group_mw)

    jobs: List[Dict[str, Any]] = []
    include = list(include)

    if not target:
        # no explicit targets: auto-detect Truffle project or local sol files
        if Path("truffle-config.js").exists() or Path("truffle.js").exists():
            files = find_truffle_artifacts(Path.cwd())
            if not files:
                raise click.exceptions.UsageError((
                    "Could not find any truffle artifacts. Are you in the project root? "
                    "Did you run truffle compile?"))
            LOGGER.debug(
                f"Detected Truffle project with files:{', '.join(files)}")
            for file in files:
                jobs.append(generate_truffle_payload(file))

        elif list(glob("*.sol")):
            LOGGER.debug("Detected Solidity files in directory")
            jobs = walk_solidity_files(ctx=ctx,
                                       solc_version=solc_version,
                                       remappings=remap_import)
        else:
            raise click.exceptions.UsageError(
                "No argument given and unable to detect Truffle project or Solidity files"
            )
    else:
        for target_elem in target:
            # a target may carry contract suffixes: "path/file.sol:MyContract"
            target_split = target_elem.split(":")
            element, suffix = target_split[0], target_split[1:]
            include += suffix
            if element.startswith("0x"):
                LOGGER.debug(f"Identified target {element} as bytecode")
                jobs.append(generate_bytecode_payload(element))
            elif Path(element).is_file() and Path(element).suffix == ".sol":
                LOGGER.debug(
                    f"Trying to interpret {element} as a solidity file")
                jobs.append(
                    generate_solidity_payload(
                        file=element,
                        version=solc_version,
                        contracts=suffix,
                        remappings=remap_import,
                    ))
            elif Path(element).is_dir():
                LOGGER.debug(f"Identified target {element} as directory")
                files = find_truffle_artifacts(Path(element))
                if files:
                    # extract truffle artifacts if config found in target
                    LOGGER.debug(
                        f"Identified {element} directory as truffle project")
                    jobs.extend(
                        [generate_truffle_payload(file) for file in files])
                else:
                    # recursively enumerate sol files if not a truffle project
                    LOGGER.debug(
                        f"Identified {element} as directory containing Solidity files"
                    )
                    jobs.extend(
                        walk_solidity_files(
                            ctx,
                            solc_version,
                            base_path=element,
                            remappings=remap_import,
                        ))
            else:
                raise click.exceptions.UsageError(
                    f"Could not interpret argument {element} as bytecode, Solidity file, or Truffle project"
                )

    # sanitize local paths
    LOGGER.debug(f"Sanitizing {len(jobs)} jobs")
    jobs = [sanitize_paths(job) for job in jobs]
    # filter jobs where no bytecode was produced
    LOGGER.debug(f"Filtering {len(jobs)} jobs for empty bytecode")
    jobs = [job for job in jobs if is_valid_job(job)]

    # reduce to whitelisted contract names
    if include:
        LOGGER.debug(f"Filtering {len(jobs)} job(s) for contracts to be included")
        found_contracts = {job["contract_name"] for job in jobs}
        overlap = set(include).difference(found_contracts)
        if overlap:
            raise click.UsageError(
                f"The following contracts could not be found: {', '.join(overlap)}"
            )
        jobs = [job for job in jobs if job["contract_name"] in include]

    LOGGER.debug(f"Submitting {len(jobs)} analysis jobs to the MythX API")
    uuids = []
    with click.progressbar(jobs) as bar:
        for job in bar:
            # attach execution mode, submit, poll
            job.update({"analysis_mode": mode})
            resp = ctx["client"].analyze(**job)
            uuids.append(resp.uuid)

    if async_flag:
        LOGGER.debug(
            f"Asynchronous submission enabled - printing {len(uuids)} UUIDs and exiting"
        )
        write_or_print("\n".join(uuids))
        return

    issues_list: List[Tuple[DetectedIssuesResponse,
                            Optional[AnalysisInputResponse]]] = []
    formatter: BaseFormatter = FORMAT_RESOLVER[ctx["fmt"]]
    for uuid in uuids:
        while not ctx["client"].analysis_ready(uuid):
            # TODO: Add poll interval option
            LOGGER.debug(f"Analysis {uuid} not ready yet - waiting")
            time.sleep(3)
        LOGGER.debug(f"{uuid}: Fetching report")
        resp: DetectedIssuesResponse = ctx["client"].report(uuid)
        LOGGER.debug(f"{uuid}: Fetching input")
        # only fetch the analysis input if the chosen formatter needs it
        inp: Optional[AnalysisInputResponse] = ctx["client"].request_by_uuid(
            uuid) if formatter.report_requires_input else None

        LOGGER.debug(f"{uuid}: Applying SWC filters")
        util.filter_report(
            resp,
            min_severity=min_severity,
            swc_blacklist=swc_blacklist,
            swc_whitelist=swc_whitelist,
        )
        # extend response with job UUID to keep formatter logic isolated
        resp.uuid = uuid
        issues_list.append((resp, inp))

    LOGGER.debug(f"Printing report for {len(issues_list)} issue items")
    write_or_print(formatter.format_detected_issues(issues_list))
    sys.exit(ctx["retval"])
Ejemplo n.º 4
0
)
from mythx_models.response import (
    AnalysisListResponse,
    AnalysisStatusResponse,
    AnalysisSubmissionResponse,
    AuthLoginResponse,
    AuthLogoutResponse,
    AuthRefreshResponse,
    DetectedIssuesResponse,
)

from pythx.middleware.group_data import GroupDataMiddleware

from .common import generate_request_dict, get_test_case

# Middleware fixtures covering every combination of group id/name presence,
# used by the parametrized middleware tests below.
EMPTY_MIDDLEWARE = GroupDataMiddleware()  # neither id nor name set
ID_ONLY_MIDDLEWARE = GroupDataMiddleware(group_id="test-id")  # id only
NAME_ONLY_MIDDLEWARE = GroupDataMiddleware(group_name="test-name")  # name only
FULL_MIDDLEWARE = GroupDataMiddleware(group_id="test-id",
                                      group_name="test-name")  # both set


@pytest.mark.parametrize(
    "middleware,request_dict,id_added,name_added",
    [
        (
            EMPTY_MIDDLEWARE,
            generate_request_dict(
                get_test_case("testdata/analysis-list-request.json",
                              AnalysisListRequest)),
            False,