def nodejs_build(src, reports_dir, lang_tools):
    """
    Automatically build nodejs project

    :param src: Source directory
    :param reports_dir: Reports directory to store any logs
    :param lang_tools: Language specific build tools

    :return: boolean status from the build. True if the command executed
        successfully. False otherwise
    """
    src_path = Path(src)
    # Presence of yarn.lock selects yarn; otherwise a package.json means npm
    has_yarn_lock = any(src_path.glob("yarn.lock"))
    has_package_json = any(src_path.glob("package.json"))
    if has_yarn_lock:
        install_cmd = lang_tools.get("yarn")
    elif has_package_json:
        install_cmd = lang_tools.get("npm")
    else:
        LOG.debug(
            "Nodejs auto build is supported only for npm or yarn based projects"
        )
        return False
    cp = exec_tool(install_cmd, src)
    LOG.debug(cp.stdout)
    install_ok = cp.returncode == 0
    # Best-effort "run build" step; its failure does not affect the status
    try:
        runner = "yarn" if has_yarn_lock else "npm"
        exec_tool([runner, "run", "build"], src)
    except Exception:
        LOG.debug("Automatic build has failed for the node.js project")
    return install_ok
def nodejs_build(src, reports_dir, lang_tools):
    """
    Automatically build nodejs project

    :param src: Source directory
    :param reports_dir: Reports directory to store any logs
    :param lang_tools: Language specific build tools

    :return: boolean status from the build. True if the command executed
        successfully. False otherwise
    """
    src_path = Path(src)
    # Project flavour: yarn.lock -> yarn, rush.json -> rush, package.json -> npm
    uses_yarn = any(src_path.glob("yarn.lock"))
    uses_rush = False
    if uses_yarn:
        install_cmd = lang_tools.get("yarn")
    elif any(src_path.glob("rush.json")):
        install_cmd = lang_tools.get("rush")
        uses_rush = True
    elif any(src_path.glob("package.json")):
        install_cmd = lang_tools.get("npm")
    else:
        LOG.debug(
            "Nodejs auto build is supported only for npm or yarn or rush based projects"
        )
        return False
    cp = exec_tool("auto-build", install_cmd, src)
    ret = bool(cp) and cp.returncode == 0
    # Best-effort build step; its failure is only logged
    try:
        if uses_rush:
            build_cmd = ["rush", "rebuild"]
        else:
            build_cmd = ["yarn" if uses_yarn else "npm", "run", "build"]
        exec_tool("auto-build", build_cmd, src)
    except Exception:
        if uses_rush:
            LOG.warning(
                "Automatic build for rush.js has failed. Try installing the packages manually before invoking scan.\nIf this works then let us know the build steps by filing an issue."
            )
        else:
            LOG.debug("Automatic build has failed for the node.js project")
    return ret
def java_build(src, reports_dir, lang_tools):
    """
    Automatically build java project

    :param src: Source directory
    :param reports_dir: Reports directory to store any logs
    :param lang_tools: Language specific build tools

    :return: boolean status from the build. True if the command executed
        successfully. False otherwise
    """
    cmd_args = []
    pom_files = [p.as_posix() for p in Path(src).glob("pom.xml")]
    env = os.environ.copy()
    # Select the JDK: explicit Java 8 opt-in wins, otherwise default to Java 11
    if os.environ.get("USE_JAVA_8") or os.environ.get("WITH_JAVA_8"):
        scan_java_home = os.environ.get("SCAN_JAVA_8_HOME")
    else:
        scan_java_home = os.environ.get("SCAN_JAVA_11_HOME")
    # FIX: only set SCAN_JAVA_HOME when the source variable is present.
    # Assigning None into the env mapping makes subprocess raise TypeError.
    if scan_java_home:
        env["SCAN_JAVA_HOME"] = scan_java_home
    if pom_files:
        cmd_args = lang_tools.get("maven")
    else:
        gradle_files = [p.as_posix() for p in Path(src).glob("build.gradle")]
        if gradle_files:
            cmd_args = lang_tools.get("gradle")
    if not cmd_args:
        LOG.info(
            "Java auto build is supported only for maven or gradle based projects"
        )
        return False
    cp = exec_tool(cmd_args, src, env=env, stdout=subprocess.PIPE)
    LOG.debug(cp.stdout)
    return cp.returncode == 0
def auto_build(type_list, src, reports_dir):
    """
    Automatically build project identified by type

    :param type_list: Project types
    :param src: Source directory
    :param reports_dir: Reports directory to store any logs

    :return: boolean status from the build. True if the command executed
        successfully. False otherwise
    """
    status = True
    for ptype in type_list:
        tools = build_tools_map.get(ptype)
        if not tools:
            continue
        # A plain command list is executed directly
        if isinstance(tools, list):
            cp = exec_tool(
                tools, src, env=os.environ.copy(), stdout=subprocess.PIPE
            )
            LOG.debug(cp.stdout)
            status = status & (cp.returncode == 0)
        # Dispatch to a "<ptype>_build" function defined in this module, if any.
        # Missing functions or build errors are treated as best-effort failures.
        try:
            build_fn = getattr(sys.modules[__name__], "%s_build" % ptype)
            status = status & build_fn(src, reports_dir, tools)
        except Exception:
            LOG.debug("Unable to auto build project of type {}".format(ptype))
    return status
def authenticate():
    """
    Method to authenticate with ShiftLeft NG SAST cloud when the required
    tokens gets passed via environment variables
    """
    if is_authenticated():
        return
    sl_org = config.get("SHIFTLEFT_ORG_ID", config.get("SHIFTLEFT_ORGANIZATION_ID"))
    sl_token = config.get("SHIFTLEFT_ACCESS_TOKEN")
    sl_cmd = config.get("SHIFTLEFT_NGSAST_CMD")
    run_uuid = config.get("run_uuid")
    # All credentials plus a working sl binary are required to attempt auth
    if not (sl_org and sl_token and sl_cmd and utils.check_command(sl_cmd)):
        return
    login_args = [
        sl_cmd,
        "auth",
        "--no-auto-update",
        "--no-diagnostic",
        "--org",
        sl_org,
        "--token",
        sl_token,
    ]
    cp = exec_tool("NG SAST", login_args)
    if cp.returncode != 0:
        LOG.warning(
            "ShiftLeft NG SAST authentication has failed. Please check the credentials"
        )
    else:
        LOG.info("Successfully authenticated with NG SAST cloud")
        track({"id": run_uuid, "scan_mode": "ng-sast", "sl_org": sl_org})
def authenticate():
    """
    Method to authenticate with shiftleft inspect cloud when the required
    tokens gets passed via environment variables
    """
    if is_authenticated():
        return
    sl_org = config.get("SHIFTLEFT_ORG_ID")
    sl_token = config.get("SHIFTLEFT_ACCESS_TOKEN")
    sl_cmd = config.get("SHIFTLEFT_INSPECT_CMD")
    # Silently skip authentication when any credential is missing
    if not (sl_org and sl_token and sl_cmd):
        return
    login_args = [
        sl_cmd,
        "auth",
        "--no-auto-update",
        "--no-diagnostic",
        "--org",
        sl_org,
        "--token",
        sl_token,
    ]
    cp = exec_tool(login_args)
    if cp.returncode == 0:
        LOG.info("Successfully authenticated with inspect cloud")
    else:
        LOG.warning(
            "ShiftLeft Inspect authentication has failed. Please check the credentials"
        )
def java_build(src, reports_dir, lang_tools):
    """
    Automatically build java project

    :param src: Source directory
    :param reports_dir: Reports directory to store any logs
    :param lang_tools: Language specific build tools

    :return: boolean status from the build. True if the command executed
        successfully. False otherwise
    """
    cmd_args = []
    pom_files = [p.as_posix() for p in Path(src).rglob("pom.xml")]
    gradle_files = [p.as_posix() for p in Path(src).rglob("build.gradle")]
    sbt_files = [p.as_posix() for p in Path(src).rglob("build.sbt")]
    env = get_env()
    if pom_files:
        cmd_args = lang_tools.get("maven")
    elif gradle_files:
        cmd_args = get_gradle_cmd(src, lang_tools.get("gradle"))
    elif sbt_files:
        cmd_args = lang_tools.get("sbt")
    if not cmd_args:
        # FIX: message previously omitted sbt even though build.sbt
        # projects are handled above
        LOG.info(
            "Java auto build is supported only for maven, gradle or sbt based projects"
        )
        return False
    cp = exec_tool("auto-build", cmd_args, src, env=env, stdout=subprocess.PIPE)
    if cp:
        LOG.debug(cp.stdout)
        return cp.returncode == 0
    return False
def android_build(src, reports_dir, lang_tools):
    """
    Automatically build android project

    :param src: Source directory
    :param reports_dir: Reports directory to store any logs
    :param lang_tools: Language specific build tools

    :return: boolean status from the build. True if the command executed
        successfully. False otherwise
    """
    # An Android SDK location is mandatory for gradle to resolve the platform
    if not os.getenv("ANDROID_SDK_ROOT") and not os.getenv("ANDROID_HOME"):
        LOG.info(
            "ANDROID_SDK_ROOT or ANDROID_HOME should be set for automatically building android projects"
        )
        return False
    # Always use the android-specific tool configuration
    lang_tools = build_tools_map.get("android")
    env = get_env()
    root = Path(src)
    if any(root.rglob("build.gradle")) or any(root.rglob("build.gradle.kts")):
        gradle_cmd = get_gradle_cmd(src, lang_tools.get("gradle"))
        cp = exec_tool(
            "auto-build", gradle_cmd, src, env=env, stdout=subprocess.PIPE
        )
        if cp:
            LOG.debug(cp.stdout)
            return cp.returncode == 0
    return False
def kotlin_build(src, reports_dir, lang_tools):
    """
    Automatically build kotlin project

    :param src: Source directory
    :param reports_dir: Reports directory to store any logs
    :param lang_tools: Language specific build tools

    :return: boolean status from the build. True if the command executed
        successfully. False otherwise
    """
    gradle_kts_files = [p.as_posix() for p in Path(src).rglob("build.gradle.kts")]
    # Check if this is a android kotlin project and delegate accordingly
    if find_files(src, "proguard-rules.pro", False, True) or find_files(
        src, "AndroidManifest.xml", False, True
    ):
        return android_build(src, reports_dir, lang_tools)
    if gradle_kts_files:
        cmd_args = get_gradle_cmd(src, lang_tools.get("gradle"))
        cp = exec_tool(
            "auto-build", cmd_args, src, env=get_env(), stdout=subprocess.PIPE
        )
        if cp:
            LOG.debug(cp.stdout)
            return cp.returncode == 0
        # FIX: previously fell through and implicitly returned None when
        # exec_tool produced no result; callers combine the result with
        # `ret & ...`, which breaks on None
        return False
    # Non-kts kotlin projects follow the standard java build path
    return java_build(src, reports_dir, lang_tools)
def php_build(src, reports_dir, lang_tools):
    """
    Automatically build php project

    :param src: Source directory
    :param reports_dir: Reports directory to store any logs
    :param lang_tools: Language specific build tools

    :return: boolean status from the build. True if the command executed
        successfully. False otherwise
    """

    def _run(args):
        # Execute a composer command, log its output and report success
        cp = exec_tool(
            "auto-build", args, src, env=os.environ.copy(), stdout=subprocess.PIPE
        )
        if cp:
            LOG.debug(cp.stdout)
            return cp.returncode == 0
        return False

    # If there is no composer.json try to create one first
    if not any(Path(src).glob("composer.json")):
        _run(lang_tools.get("init"))
    ret = _run(lang_tools.get("install"))
    # If composer install fails, try composer update
    if not ret:
        ret = _run(lang_tools.get("update"))
    return ret
def inspect_scan(language, src, reports_dir, convert, repo_context):
    """
    Method to perform inspect cloud scan

    Args:
      language Project language
      src Project dir
      reports_dir Directory for output reports
      convert Boolean to enable normalisation of reports json
      repo_context Repo context
    """
    run_uuid = config.get("run_uuid")
    cpg_mode = config.get("SHIFTLEFT_CPG")
    env = os.environ.copy()
    # NOTE(review): SCAN_JAVA_8_HOME may be unset, leaving a None value in the
    # env mapping, which would break the subprocess call below — confirm the
    # deployment image always sets it
    env["SCAN_JAVA_HOME"] = os.environ.get("SCAN_JAVA_8_HOME")
    report_fname = utils.get_report_file(
        "ng-sast", reports_dir, convert, ext_name="json"
    )
    sl_cmd = config.get("SHIFTLEFT_NGSAST_CMD")
    # Check if sl cli is available
    if not utils.check_command(sl_cmd):
        LOG.warning(
            "sl cli is not available. Please check if your build uses shiftleft/scan-java as the image"
        )
        return
    analyze_files = config.get("SHIFTLEFT_ANALYZE_FILE")
    analyze_target_dir = config.get(
        "SHIFTLEFT_ANALYZE_DIR", os.path.join(src, "target")
    )
    extra_args = None
    # Locate artifacts per language when no explicit file was configured
    if not analyze_files:
        if language == "java":
            analyze_files = utils.find_java_artifacts(analyze_target_dir)
        elif language == "csharp":
            if not utils.check_dotnet():
                LOG.warning(
                    "dotnet is not available. Please check if your build uses shiftleft/scan-csharp as the image"
                )
                return
            analyze_files = utils.find_csharp_artifacts(src)
            cpg_mode = True
        else:
            # JS-family scans analyze the source tree directly in CPG mode
            if language == "ts" or language == "nodejs":
                language = "js"
                extra_args = ["--", "--ts", "--babel"]
            analyze_files = [src]
            cpg_mode = True
    app_name = find_app_name(src, repo_context)
    branch = repo_context.get("revisionId")
    if not branch:
        branch = "master"
    if not analyze_files:
        LOG.warning(
            "Unable to find any build artifacts. Compile your project first before invoking scan or use the auto build feature."
        )
        return
    # Only a single artifact is supported; pick the first one found
    if isinstance(analyze_files, list) and len(analyze_files) > 1:
        LOG.warning(
            "Multiple files found in {}. Only {} will be analyzed".format(
                analyze_target_dir, analyze_files[0]
            )
        )
        analyze_files = analyze_files[0]
    # Placeholder None entries are filtered out after assembly
    sl_args = [
        sl_cmd,
        "analyze",
        "--no-auto-update" if language == "java" else None,
        "--wait",
        "--cpg" if cpg_mode else None,
        "--" + language,
        "--tag",
        "branch=" + branch,
        "--app",
        app_name,
    ]
    sl_args += [analyze_files]
    if extra_args:
        sl_args += extra_args
    sl_args = [arg for arg in sl_args if arg is not None]
    LOG.info(
        "About to perform ShiftLeft NG SAST cloud analysis. This might take a few minutes ..."
    )
    LOG.debug(" ".join(sl_args))
    LOG.debug(repo_context)
    cp = exec_tool("NG SAST", sl_args, src, env=env)
    if cp.returncode != 0:
        LOG.warning("NG SAST cloud analyze has failed with the below logs")
        LOG.debug(sl_args)
        LOG.info(cp.stderr)
        return
    # Download findings and optionally convert the report to SARIF
    findings_data = fetch_findings(app_name, branch, report_fname)
    if findings_data and convert:
        crep_fname = utils.get_report_file(
            "ng-sast", reports_dir, convert, ext_name="sarif"
        )
        convertLib.convert_file("ng-sast", sl_args[1:], src, report_fname, crep_fname)
    track({"id": run_uuid, "scan_mode": "ng-sast", "sl_args": sl_args})
def inspect_scan(language, src, reports_dir, convert, repo_context):
    """
    Method to perform inspect cloud scan

    Args:
      language Project language
      src Project dir
      reports_dir Directory for output reports
      convert Boolean to enable normalisation of reports json
      repo_context Repo context
    """
    report_fname = utils.get_report_file(
        "inspect", reports_dir, convert, ext_name="json"
    )
    sl_cmd = config.get("SHIFTLEFT_INSPECT_CMD")
    analyze_target_dir = config.get(
        "SHIFTLEFT_ANALYZE_DIR", os.path.join(src, "target")
    )
    analyze_files = config.get("SHIFTLEFT_ANALYZE_FILE")
    if not analyze_files:
        if language == "java":
            analyze_files = utils.find_java_artifacts(analyze_target_dir)
    app_name = config.get("SHIFTLEFT_APP", repo_context.get("repositoryName"))
    no_cpg = config.get("SHIFTLEFT_NO_CPG")
    if not app_name:
        app_name = os.path.dirname(src)
    branch = repo_context.get("revisionId")
    if not branch:
        branch = "master"
    if not analyze_files:
        LOG.warning(
            "Unable to find any build artifacts in {}. Run mvn package or a similar command before invoking inspect scan".format(
                analyze_target_dir
            )
        )
        return
    # Only the first artifact is analyzed
    if len(analyze_files) > 1:
        LOG.warning(
            "Multiple jar files found in {}. Only {} will be analyzed".format(
                analyze_target_dir, analyze_files[0]
            )
        )
    sl_args = [
        sl_cmd,
        "analyze",
        "--no-auto-update",
        "--wait",
        "--cpg" if not no_cpg else None,
        "--java",
        "--tag",
        "branch=" + branch,
        "--app",
        app_name,
    ]
    sl_args += [analyze_files[0]]
    # FIX: drop the placeholder entry instead of passing a literal empty ""
    # argument to the sl CLI when SHIFTLEFT_NO_CPG is set
    sl_args = [arg for arg in sl_args if arg is not None]
    env = os.environ.copy()
    java_8_home = os.environ.get("JAVA_8_HOME")
    # FIX: only override JAVA_HOME when JAVA_8_HOME is actually set;
    # a None value in the env mapping makes subprocess raise TypeError
    if java_8_home:
        env["JAVA_HOME"] = java_8_home
    LOG.info(
        "About to perform ShiftLeft Inspect cloud analysis. This might take a few minutes ..."
    )
    cp = exec_tool(sl_args, src, env=env)
    if cp.returncode != 0:
        LOG.warning("Inspect cloud analyze has failed with the below logs")
        LOG.info(cp.stdout)
        LOG.info(cp.stderr)
        return
    # Download findings and optionally convert the report to SARIF
    findings_data = fetch_findings(app_name, branch, report_fname)
    if findings_data and convert:
        crep_fname = utils.get_report_file(
            "inspect", reports_dir, convert, ext_name="sarif"
        )
        convertLib.convert_file("inspect", sl_args[1:], src, report_fname, crep_fname)