def wrapper(*args, **kwargs):
    """Invoke the wrapped ``func``, timing the call and logging its duration."""
    began = time.time()
    result = func(*args, **kwargs)
    elapsed = time.time() - began
    # get_run_duration splits a second count into h/m/s parts
    hrs, mins, secs = get_run_duration(elapsed)
    main_logger.info(
        f"{func.__name__}() completed in: {hrs}h {mins}m {secs}s")
    return result
def docker_logout():
    """
    Logout of the registry.
    """
    main_logger.info(f"#### Logout of {JFROG_REGISTRY} ####")
    logout_cmd = f"docker logout {JFROG_REGISTRY}"
    run_subprocess(logout_cmd, logger=main_logger)
def dynamic_scan(args): """ Prepare and run the dynamic scan. Args: args ([dict]): the arguments passed to the script """ # get the image tag image_tags = get_latest_stable_image_tags() # remove the old scans old_scan_status_dict = remove_old_scans(SINGLE_DYNAMIC) # spin up the containers (rt and db2), if # there is no scan in pending statuses for status in old_scan_status_dict.values(): if status in PENDING_STATUSES: return # prep containers for the scans prep_containers(args, image_tags) # create the new scans main_logger.info(f"Create new scan for: {APP_URL_DICT}") for app, url in APP_URL_DICT.items(): user = "******" if app != "WSC" else "csmith" passwd = "password" if app != "WSC" else "csmith" # scan data create_scan_data = { "StartingUrl": url, "LoginUser": user, "LoginPassword": passwd, "ScanType": "Production", "PresenceId": PRESENCE_ID, "IncludeVerifiedDomains": "true", "HttpAuthUserName": "******", "HttpAuthPassword": "******", "HttpAuthDomain": "string", "OnlyFullResults": "true", "TestOptimizationLevel": "NoOptimization", "ScanName": f"{app} Scan", "EnableMailNotification": "false", "Locale": "en-US", "AppId": SINGLE_DYNAMIC, "Execute": "true", "Personal": "false", } # creating a new scan main_logger.info(f"Creating a new scan for {app}...") res = requests.post(f"{ASOC_API_ENDPOINT}/Scans/DynamicAnalyzer", json=create_scan_data, headers=headers) main_logger.debug(res)
def docker_login():
    """
    Login to the registry.

    The actual ``docker login`` command still receives the real API key,
    but the key is redacted from the echoed log line so credentials do
    not leak into log files.
    """
    main_logger.info(f"#### Login to {JFROG_REGISTRY} ####")
    # FIX: previously logged the real API key in plaintext
    main_logger.info(
        f"docker login -u {JFROG_USER} -p ****** {JFROG_REGISTRY}")
    run_subprocess(
        f"docker login -u {JFROG_USER} -p {JFROG_APIKEY} {JFROG_REGISTRY}",
        logger=main_logger,
    )
def init_argparse():
    """
    Init arguments for the script.

    Builds a two-level CLI: a ``mode`` subcommand (scan/reports/depcheck)
    and, for scan/reports, a nested ``type`` subcommand (all/static/dynamic).

    Raises:
        e: error thrown when init the arguments

    Returns:
        [dict]: dict of arguments
    """
    try:
        parser = argparse.ArgumentParser(
            description=
            "Automator for static, dynamic scan, and dependency check.",
            formatter_class=ArgumentDefaultsHelpFormatter,
            epilog="Have a nice day! :)",
        )
        subparsers = parser.add_subparsers(title="mode",
                                           dest="mode",
                                           description="automator mode to run",
                                           required=True)

        # create subparsers
        for mode in [SCAN, REPORTS, DEPCHECK]:
            mode_parser = subparsers.add_parser(mode)
            add_optionals_args(mode_parser)
            if mode == DEPCHECK:
                # depcheck has no scan type; just version/output options
                add_version_arg(mode_parser)
                add_output_arg(mode_parser)
            else:
                mode_subparser = mode_parser.add_subparsers(
                    title="type",
                    dest="type",
                    description="type of scan to run",
                    required=True)
                # FIX: loop variable renamed from `type`, which shadowed
                # the builtin
                for scan_type in [ALL, STATIC, DYNAMIC]:
                    type_parser = mode_subparser.add_parser(scan_type)
                    add_optionals_args(type_parser)
                    if mode == SCAN:
                        # static scans need the source tree to build
                        if scan_type == ALL or scan_type == STATIC:
                            add_source_arg(type_parser, required=True)
                        # dynamic scans need an image version
                        if scan_type == ALL or scan_type == DYNAMIC:
                            add_version_arg(type_parser)
                    if mode == REPORTS:
                        add_output_arg(type_parser)

        arguments = parser.parse_args()
    except argparse.ArgumentError as e:
        main_logger.error("Error parsing arguments")
        raise e
    else:
        main_logger.info(f"Arguments have been parsed: {arguments}")
        return arguments
def remove_old_scans(app_id):
    """
    Remove old scan by calling the ASoC API.

    Args:
        app_id ([str]): the application id that the scans belong to

    Returns:
        [dict]: the scans with their statuses
    """
    # read the old scan ids
    old_scans = get_scans(app_id)

    # map each scan name to its latest execution status
    scan_status_dict = {
        scan["Name"]: scan["LatestExecution"]["Status"]
        for scan in old_scans
    }

    # if any scan in the app is still running or in InQueue, Paused,
    # Pausing, Stopping status, do not remove anything — just report
    # the current statuses back to the caller
    if any(status in PENDING_STATUSES
           for status in scan_status_dict.values()):
        main_logger.warning("Scan(s) pending. Returning...")
        return scan_status_dict

    # remove the old scans from the app before creating new ones
    for scan in old_scans:
        main_logger.info(f"Removing {scan['Name']} - {scan['Id']}... ")
        try:
            requests.delete(
                f"{ASOC_API_ENDPOINT}/Scans/{scan['Id']}?deleteIssues=true",
                headers=headers,
            )
        except Exception as e:
            main_logger.warning(e)

    # reset the app
    try:
        main_logger.info(f"Resetting app {app_id}")
        requests.delete(
            f"{ASOC_API_ENDPOINT}/Apps/{app_id}/Reset",
            json={"DeleteIssues": "true"},
            headers=headers,
        )
    except Exception as e:
        main_logger.warning(e)

    return scan_status_dict
def download_depcheck_tool(download_dir):
    """
    Download depcheck tool.

    Resolves the latest DependencyCheck release from GitHub, downloads
    the release zip, extracts it and marks the CLI script executable.

    Args:
        download_dir ([str]): the directory to download the depcheck tool to
    """
    main_logger.info("Downloading updated dependency check tool...")

    # resolve the latest release tag (e.g. "v8.4.0" -> "8.4.0")
    res = requests.get(DEPCHECK_REPO)
    tag_name = res.json()["tag_name"].replace("v", "")

    download_url = (
        "https://github.com/jeremylong/DependencyCheck/releases/download/"
        f"v{tag_name}/dependency-check-{tag_name}-release.zip")
    res = requests.get(download_url, allow_redirects=True)

    # FIX: `zip` shadowed the builtin and the archive was never closed;
    # the context manager handles both
    with zipfile.ZipFile(io.BytesIO(res.content)) as archive:
        archive.extractall(f"{download_dir}")

    run_subprocess(
        f"chmod +x {download_dir}/dependency-check/bin/dependency-check.sh")
def wrapper(*args, **kwargs):
    """Log entry, call signature, return value, and exit of ``func``."""
    try:
        pieces = [repr(a) for a in args]
        pieces += [f"{k}={v!r}" for k, v in kwargs.items()]
        signature = ", ".join(pieces)
        main_logger.info(f"START - {func.__name__}")
        main_logger.debug(f"{func.__name__}({signature})")
        result = func(*args, **kwargs)
        main_logger.debug(f"{func.__name__!r} returned {result!r}")
        return result
    except Exception as e:
        # record the full traceback, then let the error propagate
        main_logger.error(traceback.format_exc())
        main_logger.error(f"ERROR - {func.__name__} : {e}")
        raise
    finally:
        main_logger.info(f"END - {func.__name__}")
        sys.stdout.flush()
def get_latest_stable_image_tags():
    """
    Get latest stable image tag.

    Returns:
        [str]: the latest available stable urls
    """
    image_tags = []
    for build_url in fetch_available_build_urls(SINGLE_STREAM_RSS_URL):
        page = requests.get(build_url, auth=get_auth(build_url))
        # the page <title> carries the tag as its second word
        title = BeautifulSoup(page.text, "html.parser").find("title").text
        image_tags.append(title.split(" ")[1].lower())
    main_logger.info(f"Latest image tags: {image_tags}")
    return image_tags
def build_source_code(args):
    """
    Build the source code to prep for the scans.

    Args:
        args ([dict]): the arguments passed to the script
    """
    # Earlier build steps (environment setup, 3rd-party unpack, clean)
    # are currently disabled:
    # run_subprocess(f"cd {args.source}/Build && ./gradlew -b fullbuild.gradle setupEnvironment --stacktrace")
    # run_subprocess(f"cd {args.source}/Build && ./gradlew -b fullbuild.gradle unpack3p")
    # run_subprocess(f"cd {args.source} && Build/gradlew clean")

    # drop stale AppScan .irx artifacts so fresh ones get generated
    main_logger.info("Removing irx files...")
    run_subprocess(f'cd {args.source} && find . -name "*.irx" -type f -delete')
def wait_for_report(report):
    """
    Wait for the generated report to be ready.

    Args:
        report ([dict]): the report to download
    """
    while True:
        res = requests.get(f"{ASOC_API_ENDPOINT}/Reports/{report['Id']}",
                           headers=headers)
        # give up polling on any non-OK response
        if res.status_code != 200:
            break
        # stop once the report is marked Ready
        if res.json()["Status"] == "Ready":
            break
        main_logger.info(
            f"Report for {report['Name']} is not ready. Waiting...")
        time.sleep(TIME_TO_SLEEP)
def main():
    """
    Entry point: dispatch to the requested mode (scan, reports or
    depcheck), then best-effort remove the scan network and volume.
    """
    try:
        args = parse_arguments()
        main_logger.info(args)
        if args.mode == SCAN:
            run_scan(args)
        elif args.mode == REPORTS:
            get_reports(args)
        elif args.mode == DEPCHECK:
            depcheck(args)
    except Exception as e:
        # FIX: a top-level failure was logged at info with no traceback;
        # log it as an error and keep the stack for diagnosis
        main_logger.error(traceback.format_exc())
        main_logger.error(e)

    # teardown is best-effort: the network/volume may already be gone
    try:
        run_subprocess(f"docker network rm {NETWORK_SCAN}")
    except Exception:
        main_logger.warning(f"Error removing {NETWORK_SCAN}")
    try:
        run_subprocess(f"docker volume rm {VOL_SCAN}")
    except Exception:
        main_logger.warning(f"Error removing {VOL_SCAN}")
def cleanup(args): """ Cleaning up the resources before creating new containers. The will do the followings: - get the image list to remove - remove rt and db2 containers - remove volume and network - remove images Args: args ([dict]): the arguments passed to the script """ # clean up before creating new containers remove_images = get_remove_image_list(args) if len(remove_images) == 0: return # disconnect the containers and network main_logger.info( f"Disconnecting runtime container {RT_SCAN} from network {NETWORK_SCAN}..." ) cleanup_helper(f"docker network disconnect -f {NETWORK_SCAN} {RT_SCAN}") main_logger.info( f"Disconnecting db2 container {DB2_SCAN} from network {NETWORK_SCAN}..." ) cleanup_helper(f"docker network disconnect -f {NETWORK_SCAN} {DB2_SCAN}") # removing runtime container main_logger.info(f"Removing runtime container {RT_SCAN}...") cleanup_helper(f"docker rm -f {RT_SCAN}") # removing runtime container try: main_logger.info(f"Removing db2 container {DB2_SCAN}...") cleanup_helper(f"docker rm -f {DB2_SCAN}") except Exception as e: main_logger.info(e) # removing runtime container try: main_logger.info(f"Removing volume {VOL_SCAN}...") cleanup_helper(f"docker volume rm -f {VOL_SCAN}") except Exception as e: main_logger.info(e) # removing scan network try: main_logger.info(f"Removing network {NETWORK_SCAN}") run_subprocess(f"docker network rm {NETWORK_SCAN}") except Exception as e: main_logger.info(e) # removing images for image in remove_images: try: main_logger.info(f"Removing image {image}...") cleanup_helper(f"docker rmi {image}") except Exception as e: main_logger.info(e)
def prep_containers(args, image_tags):
    """
    Prepare the rt and db2 container. This function will do the followings:
        - login to the registry
        - start db2 and rt containers
        - build the ear for deployment
        - start liberty server
        - wait for the server to be ready
        - logout of the registry

    Args:
        args ([dict]): the arguments passed to the script
        image_tag ([str]): the tag of the image
    """
    # clean up
    cleanup(args)

    # login to registry
    docker_login()

    # starting db2 and rt containers
    main_logger.info("Starting db2 and rt containers...")
    for image_tag in image_tags:
        try:
            print()
            # "Trying <tag>" banner framed with '#'
            main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
            main_logger.info(" " * int((PADDING / 2)) + f"Trying {image_tag}" +
                             " " * int((PADDING / 2)))
            main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
            main_logger.info("Starting db2 and rt containers...")
            start_db2_container(args, image_tag)
            start_rt_container(args, image_tag)
            # first tag that brings both containers up wins
            break
        except Exception as e:
            # NOTE(review): failures only warn; if every tag fails, the
            # function continues with no containers running — confirm intended
            main_logger.warning(e)

    # build the ear
    main_logger.info("Building ear file...")
    run_subprocess(
        f'docker exec {RT_SCAN} bash -lc "buildear -warfiles=smcfs,sbc,sma,isccs,wsc"'
    )

    # start liberty server
    main_logger.info("Starting liberty server...")
    run_subprocess(f'docker exec {RT_SCAN} bash -lc "__lbstart"')

    # wait for deployment to be ready
    main_logger.info("Wait for deployment to be ready...")
    main_logger.info(
        f"Checking deployment @ {DEPLOY_SERVER}/smcfs/console/login.jsp...")
    wait_for_deployment()

    # check to see if we need to restart the server
    if needs_server_restart():
        # restart the server
        main_logger.info("Restarting liberty server...")
        run_subprocess(
            f'docker exec {RT_SCAN} bash -lc "__lbstop && __lbstart"')

        # wait again for deployment to be ready after restarting
        main_logger.info(
            "Waiting again for deployment to be ready after restarting...")
        main_logger.info(
            f"Checking deployment @ {DEPLOY_SERVER}/smcfs/console/login.jsp..."
        )
        wait_for_deployment()

    main_logger.info("The db2 and rt containers are up and running...")

    # logout of registry
    docker_logout()
def depcheck(args):
    """
    Run and export report for the dependency check.

    Builds the ear inside a throwaway runtime container, extracts the
    third-party jars from each application war, runs OWASP
    DependencyCheck over them, and syncs the HTML report to the output
    directory.

    Args:
        args ([dict]): the arguments passed to the script
    """
    try:
        # get the image tag
        image_tags = get_latest_stable_image_tags()

        # start runtime container: first tag that starts wins
        try:
            for image_tag in image_tags:
                print()
                main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
                main_logger.info(" " * int((PADDING / 2)) +
                                 f"Trying {image_tag}" +
                                 " " * int((PADDING / 2)))
                main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
                try:
                    start_rt_container(args, image_tag, rt_name=DEPCHECK_SCAN)
                except Exception as e:
                    main_logger.warning(e)
                    continue
                break
        except Exception as e:
            main_logger.warning(e)

        # build the ear
        main_logger.info("Building ear file...")
        run_subprocess(
            f'docker exec {DEPCHECK_SCAN} bash -lc "buildear -warfiles=smcfs,sbc,sma,isccs,wsc"'
        )

        # creating the source dir
        with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpdir:
            # copy the ear to tempdir
            main_logger.info("Copying the ear to tempdir...")
            run_subprocess(
                f"docker cp {DEPCHECK_SCAN}:/opt/ssfs/runtime/external_deployments/smcfs.ear {tmpdir}"
            )

            # extract war files from the ear
            run_subprocess(f"cd {tmpdir} && unzip smcfs.ear *.war")

            # extract jars
            apps = ["smcfs", "sma", "sbc", "isccs", "wsc"]
            create_dir(f"{tmpdir}/3rdpartyship")
            for app in apps:
                if app == "smcfs":
                    run_subprocess(
                        f"cd {tmpdir} && mkdir {app}jarsfolder && unzip -o -j smcfs.war yfscommon/* -d {app}jarsfolder/ -x yfscommon/platform* -x yfscommon/smcfs* -x yfscommon/*.properties -x yfscommon/*ui.jar -x yfscommon/yantra* -x yfscommon/scecore* -x yfscommon/yc*"
                    )
                else:
                    # FIX: previously this always unzipped sma.war, so the
                    # sbc/isccs/wsc folders received sma's jars; extract
                    # from each app's own war instead
                    run_subprocess(
                        f"cd {tmpdir} && mkdir {app}jarsfolder && unzip -o -j {app}.war WEB-INF/lib/* -d {app}jarsfolder/ -x WEB-INF/lib/platform*"
                    )
                run_subprocess(
                    f"cp -R {tmpdir}/{app}jarsfolder/* {tmpdir}/3rdpartyship")

            # download the latest depcheck
            download_depcheck_tool(tmpdir)

            # run dependency check
            reports_dir_path = f"reports/{get_date_str()}/{args.mode}"
            create_dir(reports_dir_path)
            run_subprocess(
                f"{tmpdir}/dependency-check/bin/dependency-check.sh -s {tmpdir}/3rdpartyship -o {reports_dir_path}/dependency_report.html --suppression {os.getcwd()}/suppressions.xml"
            )

            # copy reports to output directory
            run_subprocess(
                f"rsync -a -v --ignore-existing {os.getcwd()}/reports {args.output}"
            )
    except Exception as e:
        main_logger.warning(traceback.format_exc())
        main_logger.warning(e)
    finally:
        # FIX: the container was removed both in the except block and here,
        # so the second removal ran against an already-removed container;
        # a single removal point in finally covers every path
        run_subprocess(f"docker rm -f {DEPCHECK_SCAN}")
def static_scan(args):
    """
    Prepare and run the static scan.

    Builds the source tree, generates an .irx file per project, uploads
    each to ASoC, and creates/executes a static-analyzer scan for it.

    Args:
        args ([dict]): the arguments passed to the script
    """
    # prepare the header for requests
    file_req_header = {"Authorization": f"Bearer {get_bearer_token()}"}

    # remove the old scans
    old_scan_status_dict = remove_old_scans(SINGLE_STATIC)

    # build source code
    main_logger.info(f"Building source code...")
    build_source_code(args)

    # read the list of projects to scan
    main_logger.info(f"Getting the projects...")
    projects = get_projects()

    # the below block of code would do:
    #   - create tempdir to store the config files
    #   - go through the list of projects
    #   - generate the irx file for each project
    #   - upload the generated irx file to ASoC
    #   - create and execute the static scan
    with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpdir:
        main_logger.debug(f"PROJECTS TO SCAN: {projects}")
        for project in projects:
            project = project.strip()
            # slashes are not valid in file names
            project_file_name = project.strip().replace("/", "_")
            print()
            # "PROCESSING PROJECT" banner framed with '#'
            main_logger.info(
                "#" *
                (len(f"PROCESSING PROJECT: {project} - {project_file_name}") +
                 PADDING))
            main_logger.info(
                " " * int((PADDING / 2)) +
                f"PROCESSING PROJECT: {project} - {project_file_name}" +
                " " * int((PADDING / 2)),
            )
            main_logger.info(
                "#" *
                (len(f"PROCESSING PROJECT: {project} - {project_file_name}") +
                 PADDING))

            # if the old scan still pending, skip
            if (project in old_scan_status_dict
                    and old_scan_status_dict[project] in PENDING_STATUSES):
                main_logger.info(f"{project} is PENDING/RUNNING")
                continue

            # generate config file for appscan
            generate_appscan_config_file(args, project)
            main_logger.info(f"Generating {project_file_name}.irx file...")
            run_subprocess(
                f"source ~/.bashrc && appscan.sh prepare -c {APPSCAN_CONFIG_TMP} -n {project_file_name}.irx -d {tmpdir}"
            )

            # call ASoC API to create the static scan
            try:
                main_logger.info(
                    f"Calling ASoC API to create the static scan...")
                with open(f"{tmpdir}/{project_file_name}.irx",
                          "rb") as irx_file:
                    file_data = {"fileToUpload": irx_file}
                    # upload the irx file first; 201 means it was accepted
                    res = requests.post(f"{ASOC_API_ENDPOINT}/FileUpload",
                                        files=file_data,
                                        headers=file_req_header)
                    if res.status_code == 201:
                        data = {
                            "ARSAFileId": res.json()["FileId"],
                            "ScanName": project,
                            "AppId": SINGLE_STATIC,
                            "Locale": "en-US",
                            "Execute": "true",
                            "Personal": "false",
                        }
                        # then create and immediately execute the scan
                        res = requests.post(
                            f"{ASOC_API_ENDPOINT}/Scans/StaticAnalyzer",
                            json=data,
                            headers=headers)
                        main_logger.info(f"Response: {res.json()}")
                        main_logger.info(
                            f"PROJECT: {project} - {project_file_name} WAS PROCESSED SUCCESSFULLY."
                        )
                        print()
            except Exception as e:
                # best-effort: a failed project does not stop the others
                main_logger.warning(traceback.format_exc())
                main_logger.warning(e)