def cleanup_helper(cmd):
    """Run a cleanup command on a best-effort basis.

    Any failure is logged as a warning and swallowed so that cleanup
    never aborts the caller.

    Args:
        cmd ([str]): the command to run in a subprocess
    """
    try:
        run_subprocess(cmd)
    except Exception as exc:
        main_logger.warning(exc)
def main():
    """Entry point: dispatch on the requested mode, then clean up.

    Parses the CLI arguments, runs the selected mode (scan, reports, or
    dependency check), and always removes the docker network/volume used
    for scanning afterwards.
    """
    try:
        args = parse_arguments()
        main_logger.info(args)
        if args.mode == SCAN:
            run_scan(args)
        elif args.mode == REPORTS:
            get_reports(args)
        elif args.mode == DEPCHECK:
            depcheck(args)
    except Exception as e:
        # BUGFIX: was main_logger.info(e) — a top-level failure must be
        # logged at error level (with traceback) or it is invisible in logs.
        main_logger.error(traceback.format_exc())
        main_logger.error(e)
    finally:
        # Always remove the scan network and volume, even on failure.
        # (The two near-identical try/except blocks were folded into a loop.)
        for kind, resource in (("network", NETWORK_SCAN), ("volume", VOL_SCAN)):
            try:
                run_subprocess(f"docker {kind} rm {resource}")
            except Exception:
                main_logger.warning(f"Error removing {resource}")
def remove_old_scans(app_id):
    """Remove an application's previous scans via the ASoC API.

    Looks up the app's existing scans. If any of them is still pending
    (running, InQueue, Paused, Pausing, Stopping) nothing is deleted and
    the current statuses are returned as-is. Otherwise every old scan is
    deleted and the app is reset.

    Args:
        app_id ([str]): the application id that the scans belong to

    Returns:
        [dict]: scan name -> latest execution status for the old scans
    """
    old_scans = get_scans(app_id)

    # Map each scan name to the status of its latest execution.
    scan_status_dict = {
        scan["Name"]: scan["LatestExecution"]["Status"] for scan in old_scans
    }

    # Leave everything untouched while any scan is still in flight.
    if any(
        scan["LatestExecution"]["Status"] in PENDING_STATUSES
        for scan in old_scans
    ):
        main_logger.warning("Scan(s) pending. Returning...")
        return scan_status_dict

    # Delete each old scan (best effort) before new ones are created.
    for scan in old_scans:
        main_logger.info(f"Removing {scan['Name']} - {scan['Id']}... ")
        try:
            requests.delete(
                f"{ASOC_API_ENDPOINT}/Scans/{scan['Id']}?deleteIssues=true",
                headers=headers,
            )
        except Exception as e:
            main_logger.warning(e)

    # Reset the app so it starts from a clean slate.
    try:
        main_logger.info(f"Resetting app {app_id}")
        requests.delete(
            f"{ASOC_API_ENDPOINT}/Apps/{app_id}/Reset",
            json={"DeleteIssues": "true"},
            headers=headers,
        )
    except Exception as e:
        main_logger.warning(e)

    return scan_status_dict
def prep_containers(args, image_tags):
    """Prepare the rt and db2 containers for a scan.

    Steps performed, in order:
      - run cleanup and log in to the registry
      - start the db2 and rt containers (trying each image tag until one works)
      - build the ear for deployment and start the liberty server
      - wait for the deployment to be ready (restarting the server once if needed)
      - log out of the registry

    Args:
        args ([dict]): the arguments passed to the script
        image_tags ([str]): candidate image tags to try, in order
    """

    def _log_banner(text):
        # Framed, centred banner — identical output to the inline version.
        edge = "#" * (len(text) + PADDING)
        pad = " " * int(PADDING / 2)
        main_logger.info(edge)
        main_logger.info(pad + text + pad)
        main_logger.info(edge)

    cleanup(args)
    docker_login()

    main_logger.info("Starting db2 and rt containers...")
    for image_tag in image_tags:
        try:
            print()
            _log_banner(f"Trying {image_tag}")
            main_logger.info("Starting db2 and rt containers...")
            start_db2_container(args, image_tag)
            start_rt_container(args, image_tag)
            break  # first tag that brings both containers up wins
        except Exception as e:
            main_logger.warning(e)

    main_logger.info("Building ear file...")
    run_subprocess(
        f'docker exec {RT_SCAN} bash -lc "buildear -warfiles=smcfs,sbc,sma,isccs,wsc"'
    )

    main_logger.info("Starting liberty server...")
    run_subprocess(f'docker exec {RT_SCAN} bash -lc "__lbstart"')

    main_logger.info("Wait for deployment to be ready...")
    main_logger.info(
        f"Checking deployment @ {DEPLOY_SERVER}/smcfs/console/login.jsp...")
    wait_for_deployment()

    # One restart is attempted if the first deployment check says so.
    if needs_server_restart():
        main_logger.info("Restarting liberty server...")
        run_subprocess(
            f'docker exec {RT_SCAN} bash -lc "__lbstop && __lbstart"')
        main_logger.info(
            "Waiting again for deployment to be ready after restarting...")
        main_logger.info(
            f"Checking deployment @ {DEPLOY_SERVER}/smcfs/console/login.jsp..."
        )
        wait_for_deployment()

    main_logger.info("The db2 and rt containers are up and running...")
    docker_logout()
def depcheck(args):
    """Run the dependency check and export its report.

    Starts a runtime container from the latest stable image, builds the
    smcfs ear, extracts the third-party jars from each war into a temp
    dir, runs dependency-check over them, and rsyncs the reports to
    ``args.output``. The depcheck container is always removed on exit.

    Args:
        args ([dict]): the arguments passed to the script
    """
    try:
        # get the candidate image tags
        image_tags = get_latest_stable_image_tags()

        # start the runtime container, trying each tag until one works
        for image_tag in image_tags:
            print()
            main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
            main_logger.info(" " * int((PADDING / 2)) + f"Trying {image_tag}" +
                             " " * int((PADDING / 2)))
            main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
            try:
                start_rt_container(args, image_tag, rt_name=DEPCHECK_SCAN)
            except Exception as e:
                main_logger.warning(e)
                continue
            break

        # build the ear
        main_logger.info("Building ear file...")
        run_subprocess(
            f'docker exec {DEPCHECK_SCAN} bash -lc "buildear -warfiles=smcfs,sbc,sma,isccs,wsc"'
        )

        with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpdir:
            # copy the ear out of the container and unpack the wars
            main_logger.info("Copying the ear to tempdir...")
            run_subprocess(
                f"docker cp {DEPCHECK_SCAN}:/opt/ssfs/runtime/external_deployments/smcfs.ear {tmpdir}"
            )
            run_subprocess(f"cd {tmpdir} && unzip smcfs.ear *.war")

            # extract third-party jars from each app's war into 3rdpartyship
            apps = ["smcfs", "sma", "sbc", "isccs", "wsc"]
            create_dir(f"{tmpdir}/3rdpartyship")
            for app in apps:
                if app == "smcfs":
                    run_subprocess(
                        f"cd {tmpdir} && mkdir {app}jarsfolder && unzip -o -j smcfs.war yfscommon/* -d {app}jarsfolder/ -x yfscommon/platform* -x yfscommon/smcfs* -x yfscommon/*.properties -x yfscommon/*ui.jar -x yfscommon/yantra* -x yfscommon/scecore* -x yfscommon/yc*"
                    )
                else:
                    # BUGFIX: previously this always unzipped sma.war, so the
                    # jars of sbc/isccs/wsc were never actually collected —
                    # each app's own war is extracted now.
                    run_subprocess(
                        f"cd {tmpdir} && mkdir {app}jarsfolder && unzip -o -j {app}.war WEB-INF/lib/* -d {app}jarsfolder/ -x WEB-INF/lib/platform*"
                    )
                run_subprocess(
                    f"cp -R {tmpdir}/{app}jarsfolder/* {tmpdir}/3rdpartyship")

            # download the latest depcheck tool and run it
            download_depcheck_tool(tmpdir)
            reports_dir_path = f"reports/{get_date_str()}/{args.mode}"
            create_dir(reports_dir_path)
            run_subprocess(
                f"{tmpdir}/dependency-check/bin/dependency-check.sh -s {tmpdir}/3rdpartyship -o {reports_dir_path}/dependency_report.html --suppression {os.getcwd()}/suppressions.xml"
            )

            # copy reports to the output directory
            run_subprocess(
                f"rsync -a -v --ignore-existing {os.getcwd()}/reports {args.output}"
            )
    except Exception as e:
        main_logger.warning(traceback.format_exc())
        main_logger.warning(e)
    finally:
        # Single cleanup point. (The old code also removed the container in
        # the except branch, which made the finally-removal run a second,
        # redundant time on failure.)
        run_subprocess(f"docker rm -f {DEPCHECK_SCAN}")
def static_scan(args):
    """ Prepare and run the static scan.

    Builds the source code, then for each project generates an appscan
    .irx file, uploads it to ASoC, and creates/executes a static scan.
    Projects whose previous scan is still pending are skipped.

    Args:
        args ([dict]): the arguments passed to the script
    """
    # prepare the header for the irx file-upload requests
    # (the scan-creation request below uses the module-level `headers` instead)
    file_req_header = {"Authorization": f"Bearer {get_bearer_token()}"}

    # remove the old scans; returns scan name -> latest-execution status
    old_scan_status_dict = remove_old_scans(SINGLE_STATIC)

    # build source code
    main_logger.info(f"Building source code...")
    build_source_code(args)

    # read the list of projects to scan
    main_logger.info(f"Getting the projects...")
    projects = get_projects()

    # the below block of code would do:
    # - create tempdir to store the config files
    # - go through the list of projects
    # - generate the irx file for each project
    # - upload the generated irx file to ASoC
    # - create and execute the static scan
    with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpdir:
        main_logger.debug(f"PROJECTS TO SCAN: {projects}")
        for project in projects:
            project = project.strip()
            # '/' in project paths is not filename-safe; use '_' instead
            project_file_name = project.strip().replace("/", "_")
            print()
            # framed banner marking the start of this project's processing
            main_logger.info(
                "#" *
                (len(f"PROCESSING PROJECT: {project} - {project_file_name}") +
                 PADDING))
            main_logger.info(
                " " * int((PADDING / 2)) +
                f"PROCESSING PROJECT: {project} - {project_file_name}" +
                " " * int((PADDING / 2)),
            )
            main_logger.info(
                "#" *
                (len(f"PROCESSING PROJECT: {project} - {project_file_name}") +
                 PADDING))

            # if the old scan is still pending, skip this project
            if (project in old_scan_status_dict
                    and old_scan_status_dict[project] in PENDING_STATUSES):
                main_logger.info(f"{project} is PENDING/RUNNING")
                continue

            # generate config file for appscan
            generate_appscan_config_file(args, project)
            main_logger.info(f"Generating {project_file_name}.irx file...")
            run_subprocess(
                f"source ~/.bashrc && appscan.sh prepare -c {APPSCAN_CONFIG_TMP} -n {project_file_name}.irx -d {tmpdir}"
            )

            # call ASoC API to create the static scan
            try:
                main_logger.info(
                    f"Calling ASoC API to create the static scan...")
                with open(f"{tmpdir}/{project_file_name}.irx",
                          "rb") as irx_file:
                    file_data = {"fileToUpload": irx_file}
                    res = requests.post(f"{ASOC_API_ENDPOINT}/FileUpload",
                                        files=file_data,
                                        headers=file_req_header)
                    # 201 Created => the irx upload succeeded; only then is
                    # the scan created. NOTE(review): a non-201 upload is
                    # silently skipped — no warning is logged for it.
                    if res.status_code == 201:
                        data = {
                            "ARSAFileId": res.json()["FileId"],
                            "ScanName": project,
                            "AppId": SINGLE_STATIC,
                            "Locale": "en-US",
                            "Execute": "true",
                            "Personal": "false",
                        }
                        res = requests.post(
                            f"{ASOC_API_ENDPOINT}/Scans/StaticAnalyzer",
                            json=data,
                            headers=headers)
                        main_logger.info(f"Response: {res.json()}")
                        main_logger.info(
                            f"PROJECT: {project} - {project_file_name} WAS PROCESSED SUCCESSFULLY."
                        )
                        print()
            except Exception as e:
                # best-effort: log and move on to the next project
                main_logger.warning(traceback.format_exc())
                main_logger.warning(e)