def generate_ddc_data():
    """Fill the Unreal Derived Data Cache (DDC) by running the editor commandlet.

    Resolves the currently configured engine version to an installed editor
    executable and runs the DerivedDataCache commandlet with -fill. Falls
    through to a fatal error state when the engine version cannot be read.
    """
    pblog.info(
        "Generating DDC data, please wait... (This may take up to one hour only for the initial run)"
    )
    current_version = get_engine_version_with_prefix()
    if current_version is not None:
        engine_install_root = get_engine_install_root()
        installation_dir = os.path.join(engine_install_root, current_version)
        if os.path.isdir(installation_dir):
            ue_editor_executable = os.path.join(
                installation_dir, ue4_editor_relative_path)
            if os.path.isfile(ue_editor_executable):
                # Run the editor commandlet that populates the DDC for the project.
                # NOTE(review): shell=True with a list argument — works on Windows
                # but is unconventional; confirm intended.
                err = subprocess.run([
                    str(ue_editor_executable),
                    os.path.join(os.getcwd(), pbconfig.get('uproject_name')),
                    "-run=DerivedDataCache", "-fill"
                ], shell=True).returncode
                # The exit code is logged either way; a non-zero code alone does
                # not abort — the folder check below decides success.
                if err == 0:
                    pblog.info(f"DDC generate command has exited with {err}")
                else:
                    pblog.error(f"DDC generate command has exited with {err}")
                if not check_ddc_folder_created():
                    pbtools.error_state(
                        "DDC folder doesn't exist. Please get support from #tech-support"
                    )
                    return
                pblog.info("DDC data successfully generated!")
                return
    # Reached when the engine version could not be read, or the installation
    # directory / editor executable was not found.
    pbtools.error_state(
        "Error occurred while trying to read project version for DDC data generation. Please get support from #tech-support"
    )
def compare_md5_all(md5_json_file_path, print_log=False, ignored_extension=".zip"):
    """Verify every file listed in a checksum JSON file against its recorded MD5.

    md5_json_file_path: path of a JSON file mapping file paths to MD5 hex digests.
    print_log: when True, log the result of each comparison.
    ignored_extension: substring marking entries to skip (default: ".zip").
    Returns True only when every non-ignored file exists and matches its hash.
    """
    hash_dict = get_dict_from_json(md5_json_file_path)
    if hash_dict is None or len(hash_dict) == 0:
        return False
    is_success = True
    for file_path in hash_dict:
        # Fix: skip ignored entries BEFORE the existence check. Previously a
        # missing ignored file (e.g. an already-removed .zip package) failed
        # the whole verification even though its hash would never be compared.
        if ignored_extension in file_path:
            continue
        if not os.path.isfile(file_path):
            # If file doesn't exist, that means we fail the checksum
            if print_log:
                pblog.error(f"MD5 checksum failed for {file_path}")
                pblog.error("File does not exist")
            return False
        current_md5 = get_md5_hash(file_path)
        if hash_dict[file_path] == current_md5:
            if print_log:
                pblog.info(f"MD5 checksum successful for {file_path}")
        else:
            if print_log:
                pblog.error(f"MD5 checksum failed for {file_path}")
                pblog.error(f"Expected MD5: {hash_dict[file_path]}")
                pblog.error(f"Current MD5: {str(current_md5)}")
            is_success = False
    return is_success
def get_dict_from_json(json_file_path):
    """Parse the JSON file at json_file_path and return the resulting object.

    Returns None (after logging the exception) when the file cannot be read
    or does not contain valid JSON.
    """
    try:
        with open(json_file_path, 'rb') as json_file:
            # json.load handles the byte stream directly
            return json.load(json_file)
    except Exception as e:
        pblog.error(str(e))
        return None
def publish_handler(publish_val, dispatch_exec_path):
    """Publish a playable build of the given type via Dispatch.

    Exits the process with status 1 when the dispatch executable path is
    missing or the push fails.
    """
    if dispatch_exec_path is None:
        pblog.error(
            "--dispatch argument should be provided for --publish command")
        sys.exit(1)
    pushed = pbdispatch.push_build(
        publish_val,
        dispatch_exec_path,
        pbconfig.get('dispatch_config'),
        pbconfig.get('dispatch_stagedir'),
        pbconfig.get('dispatch_drm'),
    )
    if not pushed:
        pblog.error("Something went wrong while pushing a new playable build.")
        sys.exit(1)
def error_state(msg=None, fatal_error=False):
    """Log an optional error message and terminate with exit code 1.

    When fatal_error is True, a marker file is written so PBSync refuses to
    run again until the user resolves the issue and removes the marker.
    """
    if msg is not None:
        pblog.error(msg)
    if fatal_error:
        # Persist the fatal-error marker; subsequent runs check for this file.
        with open(error_file, 'w') as marker:
            marker.write("1")
    pblog.info(f"Logs are saved in {pbconfig.get('log_file_path')}.")
    sys.exit(1)
def clean_handler(clean_val):
    """Handle the --clean command: wipe the workspace or remove old engines.

    Exits with status 1 when the requested cleanup fails.
    """
    if clean_val == "workspace":
        if not pbtools.wipe_workspace():
            pblog.error("Something went wrong while wiping the workspace")
            sys.exit(1)
        pblog.info("Workspace wipe successful")
    elif clean_val == "engine":
        if not pbunreal.clean_old_engine_installations():
            pblog.error(
                "Something went wrong while cleaning old engine installations. You may want to clean them manually.")
            sys.exit(1)
def push_build(branch_type, dispath_exec_path, dispatch_config, dispatch_stagedir, dispatch_apply_drm_path):
    """Push and publish a build through the Dispatch tool.

    branch_type: "internal" or "playtester" ("playtester" is currently blocked).
    dispath_exec_path: path to the dispatch executable. NOTE(review): name looks
        like a typo of "dispatch_exec_path"; kept unchanged since callers may
        pass it by keyword.
    dispatch_config / dispatch_stagedir: forwarded to "dispatch build push".
    dispatch_apply_drm_path: directory searched for the game .exe that would be
        DRM-wrapped.
    Returns True on success, False on any validation or push failure.
    """
    # Test if our environment variables exist
    app_id = os.environ.get('DISPATCH_APP_ID')
    if app_id is None or app_id == "":
        pblog.error("DISPATCH_APP_ID was not defined in the system environment.")
        return False
    if branch_type == "internal":
        branch_id_env = 'DISPATCH_INTERNAL_BID'
    elif branch_type == "playtester":
        branch_id_env = 'DISPATCH_PLAYTESTER_BID'
        # Playtester publishing is intentionally disabled for now.
        pblog.error("Playtester builds are not allowed at the moment.")
        return False
    else:
        pblog.error("Unknown Dispatch branch type specified.")
        return False
    branch_id = os.environ.get(branch_id_env)
    if branch_id is None or branch_id == "":
        pblog.error(f"{branch_id_env} was not defined in the system environment.")
        return False
    executable_path = None
    # No break on match: if several .exe files exist, the last one found wins.
    for file in os.listdir(dispatch_apply_drm_path):
        if file.endswith(".exe"):
            executable_path = os.path.join(dispatch_apply_drm_path, str(file))
    if executable_path is None:
        pblog.error(f"Executable {dispatch_apply_drm_path} not found while attempting to apply DRM wrapper.")
        return False
    if os.path.getsize(executable_path) > exec_max_allowed_size:
        # Oversized exe: walk three directories up from the staging dir and
        # fall back to the default DRM executable name there.
        # NOTE(review): assumes a fixed staging layout — confirm against the
        # build pipeline's directory structure.
        executable_path = dispatch_apply_drm_path
        for i in range(3):
            executable_path = os.path.join(executable_path, "..")
        executable_path = os.path.abspath(executable_path)
        executable_path = os.path.join(executable_path, default_drm_exec_name)
    # Wrap executable with DRM
    # DRM wrapping is currently disabled (dead branch kept for future use).
    if False:
        proc = pbtools.run_with_combined_output([dispath_exec_path, "build", "drm-wrap", app_id, executable_path])
        pblog.info(proc.stdout)
        result = proc.returncode
        if result != 0:
            return False
    # Push & Publish the build
    proc = pbtools.run_with_combined_output([dispath_exec_path, "build", "push", branch_id, dispatch_config, dispatch_stagedir, "-p"])
    pblog.info(proc.stdout)
    result = proc.returncode
    return result == 0
def printversion_handler(print_val, repository_val=None):
    """Print the requested version string (no trailing newline) to stdout.

    print_val selects the source: "latest-engine" (requires repository_val),
    "current-engine", or "project". Exits with status 1 when the version
    cannot be determined or a required argument is missing.
    """
    if print_val == "latest-engine":
        if repository_val is None:
            pblog.error("--repository <URL> argument should be provided with --print latest-engine command")
            sys.exit(1)
        version = pbunreal.get_latest_available_engine_version(str(repository_val))
    elif print_val == "current-engine":
        version = pbunreal.get_engine_version()
    elif print_val == "project":
        version = pbunreal.get_project_version()
    else:
        # Unknown selector: nothing to print (argparse choices prevent this).
        return
    if version is None:
        sys.exit(1)
    print(version, end="")
def run_ue4versionator(bundle_name=None, download_symbols=False):
    """Run ue4versionator to download/register the engine build.

    bundle_name: optional engine bundle to request (passed via -bundle).
    download_symbols: when True, also download debug symbols (-with-symbols),
        which raises the required free disk space from 7 GB to 30 GB.
    Returns the ue4versionator process exit code.

    Before launching, verifies there is enough free disk space on the engine
    install root (skipped on CI), cleaning old engine installations once if
    needed, and enters a fatal error state when space is still insufficient.
    """
    required_free_gb = 7
    if download_symbols:
        required_free_gb += 23
    required_free_space = required_free_gb * 1000 * 1000 * 1000
    root = get_engine_install_root()
    if root is not None and not pbconfig.get("is_ci"):
        total, used, free = disk_usage(root)
        if free < required_free_space:
            pblog.warning(
                "Not enough free space. Cleaning old engine installations before download."
            )
            clean_old_engine_installations()
            total, used, free = disk_usage(root)
            if free < required_free_space:
                # Fix: removed stray "f" that was printed before the drive
                # letter (message read "free up space on fC:\").
                pblog.error(
                    f"You do not have enough available space to install the engine. Please free up space on {pathlib.Path(root).anchor}"
                )
                available_gb = int(free / (1000 * 1000 * 1000))
                pblog.error(f"Available space: {available_gb}GB")
                pblog.error(f"Total install size: {required_free_gb}GB")
                # Fix: report the additional space needed (required - free);
                # the old operand order produced a negative number.
                pblog.error(
                    f"Required space: {int((required_free_space - free) / (1000 * 1000 * 1000))}"
                )
                pbtools.error_state()
    command_set = ["ue4versionator.exe"]
    if bundle_name is not None:
        command_set.append("-bundle")
        command_set.append(str(bundle_name))
    if download_symbols:
        command_set.append("-with-symbols")
    if pbconfig.get("is_ci"):
        # If we're CI, use another config file
        command_set.append("-user-config")
        command_set.append(pbconfig.get("ue4v_ci_config"))
    return subprocess.run(command_set, shell=True).returncode
def compare_md5_single(compared_file_path, md5_json_file_path):
    """Compare one file's MD5 hash against the entry in a checksum JSON file.

    compared_file_path: path of the file to verify.
    md5_json_file_path: JSON file mapping ".\\"-prefixed relative paths to
        MD5 hex digests.
    Returns True when the hash matches, False otherwise (mismatch, missing
    key, unreadable file, or unreadable checksum file).
    """
    current_hash = get_md5_hash(compared_file_path)
    if current_hash is None:
        return False
    # Checksum entries are keyed with a leading ".\" relative prefix.
    dict_search_string = f".\\{compared_file_path}"
    hash_dict = get_dict_from_json(md5_json_file_path)
    if hash_dict is None or dict_search_string not in hash_dict:
        pblog.error(
            f"Key {dict_search_string} not found in {md5_json_file_path}")
        return False
    if hash_dict[dict_search_string] == current_hash:
        pblog.info(f"MD5 checksum successful for {compared_file_path}")
        return True
    pblog.error(f"MD5 checksum failed for {compared_file_path}")
    # Fix: was hash_dict[compared_file_path], which raised KeyError on the
    # mismatch path because entries are keyed with the ".\" prefix.
    pblog.error(f"Expected MD5: {hash_dict[dict_search_string]}")
    pblog.error(f"Current MD5: {str(current_hash)}")
    return False
def push_package(version_number, file_name):
    """Attach file_name as an asset to the GitHub release for version_number.

    Uses the hub CLI ("release edit -a"). Returns True when hub reports the
    asset was attached, False on any failure.
    """
    if not os.path.exists(file_name):
        pblog.error(f"Provided file {file_name} doesn't exist")
        return False
    try:
        cmd = [hub_executable_path, "release", "edit",
               version_number, "-m", "", "-a", file_name]
        output = pbtools.get_combined_output(cmd)
        if "Attaching 1 asset..." in output:
            return True
        # hub ran but did not confirm the attachment; surface its output
        pblog.error(output)
    except Exception as e:
        pblog.exception(str(e))
    pblog.error(f"Error occurred while attaching {file_name} into release {version_number}")
    return False
def main(argv):
    """CLI entry point: parse arguments, load the XML config, set up logging,
    refuse to run while an error-state marker exists, then dispatch exactly
    one command handler (sync/printversion/autoversion/clean/publish/push).
    """
    parser = argparse.ArgumentParser(description=f"Project Borealis Workspace Synchronization Tool | PBpy Library Version: {pbpy_version.ver} | PBSync Program Version: {pbsync_version.ver}")
    parser.add_argument("--sync", help="Main command for the PBSync, synchronizes the project with latest changes from the repo, and does some housekeeping",
                        choices=["all", "binaries", "engineversion", "engine", "force", "ddc"])
    parser.add_argument("--printversion", help="Prints requested version information into console. latest-engine command needs --repository parameter",
                        choices=["current-engine", "latest-engine", "project"])
    parser.add_argument(
        "--repository", help="Required gcloud repository url for --printversion latest-engine and --sync engine commands")
    parser.add_argument("--autoversion", help="Automatic version update for project version",
                        choices=["hotfix", "stable", "public"])
    parser.add_argument("--clean", help="""Do cleanup according to specified argument. If engine is provided, old engine installations will be cleared
If workspace is provided, workspace will be reset with latest changes from current branch (not revertible)""", choices=["engine", "workspace"])
    parser.add_argument("--config", help=f"Path of config XML file. If not provided, ./{default_config_name} is used as default", default=default_config_name)
    parser.add_argument(
        "--push", help="Push provided file into release of current project version")
    parser.add_argument("--publish", help="Publishes a playable build with provided build type",
                        choices=["internal", "playtester"])
    parser.add_argument(
        "--dispatch", help="Required dispatch executable path for --publish command")
    parser.add_argument(
        "--bundle", help="Engine bundle name for --sync engine command. If not provided, ue4versionator will use the default bundle supplied by the config file")
    parser.add_argument(
        "--debugpath", help="If provided, PBSync will run in provided path")
    parser.add_argument(
        "--debugbranch", help="If provided, PBSync will use provided branch as expected branch")
    if len(argv) > 0:
        args = parser.parse_args(argv)
    else:
        # No arguments at all: the tool is meant to be driven by StartProject.bat
        pblog.error("At least one valid argument should be passed!")
        pblog.error("Did you mean to launch StartProject.bat?")
        input("Press enter to continue...")
        sys.exit(1)
    if not (args.debugpath is None):
        # Work on provided debug path
        os.chdir(str(args.debugpath))

    # Parser function object for PBSync config file
    # Maps the XML config tree into the flat key/value dict used by pbconfig.
    def pbsync_config_parser_func(root): return {
        'supported_git_version': root.find('git/version').text,
        'supported_lfs_version': root.find('git/lfsversion').text,
        'expected_branch_name': root.find('git/expectedbranch').text if args.debugbranch is None else str(args.debugbranch),
        'lfs_lock_url': root.find('git/lfslockurl').text,
        'git_url': root.find('git/url').text,
        'checksum_file': root.find('git/checksumfile').text,
        'log_file_path': root.find('log/file').text,
        'ue4v_user_config': root.find('versionator/userconfig').text,
        'ue4v_ci_config': root.find('versionator/ciconfig').text,
        'ue4v_default_bundle': root.find('versionator/defaultbundle').text,
        'ue4v_ci_bundle': root.find('versionator/cibundle').text,
        'engine_base_version': root.find('project/enginebaseversion').text,
        'uproject_name': root.find('project/uprojectname').text,
        'defaultgame_path': root.find('project/defaultgameinipath').text,
        'dispatch_config': root.find('dispatch/config').text,
        'dispatch_drm': root.find('dispatch/drm').text,
        'dispatch_stagedir': root.find('dispatch/stagedir').text
    }

    # Preparation
    config_handler(args.config, pbsync_config_parser_func)
    pblog.setup_logger(pbconfig.get('log_file_path'))

    # Do not process further if we're in an error state
    if pbtools.check_error_state():
        pbtools.error_state(f"""Repository is currently in an error state. 
Please fix the issues in your workspace before running PBSync.\nIf you have already fixed the problem, you may remove {pbtools.error_file} from your project folder & run StartProject bat file again.""", True)

    # Parse args
    # Only the first provided command is executed (elif chain).
    if not (args.sync is None):
        sync_handler(args.sync, args.repository, args.bundle)
    elif not (args.printversion is None):
        printversion_handler(args.printversion, args.repository)
    elif not (args.autoversion is None):
        autoversion_handler(args.autoversion)
    elif not (args.clean is None):
        clean_handler(args.clean)
    elif not (args.publish is None):
        publish_handler(args.publish, args.dispatch)
    elif not (args.push is None):
        push_handler(args.push)
    else:
        pblog.error("At least one valid argument should be passed!")
        pblog.error("Did you mean to launch StartProject.bat?")
        input("Press enter to continue...")
        sys.exit(1)
def push_handler(file_name):
    """Attach file_name to the GitHub release of the current project version.

    Exits with status 1 when the package push fails.
    """
    project_version = pbunreal.get_project_version()
    pblog.info(f"Attaching {file_name} into GitHub release {project_version}")
    if pbhub.push_package(project_version, file_name):
        return
    pblog.error(f"Error occurred while pushing package for release {project_version}")
    sys.exit(1)
def resolve_conflicts_and_pull(retry_count=0, max_retries=1):
    """Bring the local branch up to date with the remote via stash + rebase.

    Stashes local work, runs "git pull --rebase --no-autostash", and then
    classifies the (lowercased) output to decide between success, an
    automatic retry (with exponential backoff), or a fatal error state.
    Finally prunes LFS objects in the background when no stash remains.

    retry_count: current retry attempt (used for backoff and retry limits).
    max_retries: maximum automatic retries for this invocation.
    """
    def should_attempt_auto_resolve():
        return retry_count <= max_retries
    if retry_count:
        # wait a little bit if retrying (exponential)
        time.sleep(0.25 * (1 << retry_count))
    # Disable watchman for now
    disable_watchman()
    out = get_combined_output(["git", "status", "--ahead-behind"])
    pblog.info(out)
    # Only pull when we are not ahead of the remote; otherwise skip straight
    # to the success path (which still runs "git lfs pull").
    if "ahead" not in out:
        pblog.info(
            "Please wait while getting the latest changes from the repository. It may take a while..."
        )
        # Make sure upstream is tracked correctly
        pbgit.set_tracking_information(pbgit.get_current_branch_name())
        pblog.info("Trying to stash local work...")
        proc = run_with_combined_output(["git", "stash"])
        out = proc.stdout
        # Only treat as stashed when git actually saved something.
        stashed = proc.returncode == 0 and "Saved working directory and index state" in out
        pblog.info(out)
        pblog.info(
            "Trying to rebase workspace with the latest changes from the repository..."
        )
        result = run_with_combined_output(
            ["git", "pull", "--rebase", "--no-autostash"])
        # TODO: autostash handling
        # pblog.info("Trying to rebase workspace with latest changes on the repository...")
        # result = run_with_combined_output(["git", "pull", "--rebase", "--autostash"])
        code = result.returncode
        out = result.stdout
        pblog.info(out)
        # Lowercased so the substring checks below are case-insensitive.
        out = out.lower()
        error = code != 0
    else:
        stashed = False
        error = False

    def pop_if_stashed():
        # Restore the user's stashed work, if any.
        if stashed:
            pbgit.stash_pop()

    def handle_success():
        pop_if_stashed()
        # ensure we pull LFS
        run(["git", "lfs", "pull"])
        pblog.success(
            "Success! You are now on the latest changes without any conflicts."
        )

    def handle_error(msg=None):
        # Abort any in-progress rebase/merge, restore the stash, then bail.
        pbgit.abort_all()
        pop_if_stashed()
        error_state(msg, fatal_error=True)

    # Classify the pull output. Order matters: success markers are checked
    # before the error/retry markers.
    if not error:
        handle_success()
    elif "fast-forwarded" in out:
        handle_success()
    elif "up to date" in out:
        handle_success()
    elif "rewinding head" in out and not ("error" in out or "conflict" in out):
        handle_success()
    elif "successfully rebased and updated" in out:
        handle_success()
    elif "failed to merge in the changes" in out or "could not apply" in out:
        handle_error(
            "Aborting the rebase. Changes on one of your commits will be overridden by incoming changes. Please request help in #tech-support to resolve conflicts, and please do not run StartProject.bat until the issue is resolved."
        )
    elif "unmerged files" in out or "merge_head exists" in out:
        # we can't abort anything, but don't let stash linger to restore the original repo state
        pop_if_stashed()
        error_state(
            "You are in the middle of a merge. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved.",
            fatal_error=True)
    elif "unborn" in out:
        if should_attempt_auto_resolve():
            pblog.error("Unborn branch detected. Retrying...")
            retry_count += 1
            resolve_conflicts_and_pull(retry_count)
            return
        else:
            handle_error(
                "You are on an unborn branch. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved."
            )
    elif "no remote" in out or "no such remote" in out or "refspecs without repo" in out:
        if should_attempt_auto_resolve():
            pblog.error("Remote repository not found. Retrying...")
            retry_count += 1
            # Allow up to 2 retries for transient remote lookup failures.
            resolve_conflicts_and_pull(retry_count, 2)
            return
        else:
            handle_error(
                "The remote repository could not be found. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved."
            )
    elif "cannot open" in out:
        if should_attempt_auto_resolve():
            pblog.error("Git file info could not be read. Retrying...")
            retry_count += 1
            # Allow up to 3 retries for transient file-lock issues.
            resolve_conflicts_and_pull(retry_count, 3)
            return
        else:
            handle_error(
                "Git file info could not be read. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved."
            )
    else:
        # We have no idea what the state of the repo is. Do nothing except bail.
        error_state(
            "Aborting the repo update because of an unknown error. Request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved.",
            fatal_error=True)
    # only prune if we don't have a stash
    out = get_combined_output(["git", "stash", "list"])
    if len(out) < 3:
        # Fire-and-forget background prune/dedup of LFS objects.
        if os.name == "nt":
            subprocess.Popen("git lfs prune -c ; git lfs dedup", shell=True, creationflags=subprocess.DETACHED_PROCESS)
        elif os.name == "posix":
            subprocess.Popen("nohup git lfs prune -c || nohup git lfs dedup", shell=True)
def sync_handler(sync_val: str, repository_val=None, requested_bundle_name=None):
    """Handle the --sync command.

    sync_val: one of "all"/"force" (full workspace sync), "engineversion"
        (update the engine version in the .uproject, needs repository_val),
        "ddc" (generate DDC data), "binaries" (pull binary package), or
        "engine" (download/register an engine build via ue4versionator).
    repository_val: gcloud repository URL, required for "engineversion".
    requested_bundle_name: optional engine bundle for "engine".
    """
    sync_val = sync_val.lower()
    if sync_val == "all" or sync_val == "force":
        # Firstly, check our remote connection before doing anything
        remote_state, remote_url = pbgit.check_remote_connection()
        if not remote_state:
            pbtools.error_state(
                f"Remote connection was not successful. Please verify that you have a valid git remote URL & internet connection. Current git remote URL: {remote_url}")
        else:
            pblog.info("Remote connection is up")
        pblog.info("------------------")
        pblog.info(f"Executing {sync_val} sync command")
        pblog.info(f"PBpy Library Version: {pbpy_version.ver}")
        pblog.info(f"PBSync Program Version: {pbsync_version.ver}")
        pblog.info("------------------")
        # Verify the installed Git version matches the supported one.
        detected_git_version = pbgit.get_git_version()
        needs_git_update = False
        if detected_git_version == pbconfig.get('supported_git_version'):
            pblog.info(f"Current Git version: {detected_git_version}")
        else:
            pblog.error("Git is not updated to the supported version in your system")
            pblog.error(f"Supported Git Version: {pbconfig.get('supported_git_version')}")
            pblog.error(f"Current Git Version: {detected_git_version}")
            pblog.error("Please install the supported Git version from https://github.com/microsoft/git/releases")
            pblog.error("Visit https://github.com/ProjectBorealisTeam/pb/wiki/Prerequisites for installation instructions")
            if os.name == "nt":
                # Open the installer download for the supported version.
                webbrowser.open(f"https://github.com/microsoft/git/releases/download/v{pbconfig.get('supported_git_version')}/Git-{pbconfig.get('supported_git_version')}-64-bit.exe")
            needs_git_update = True
        if os.name == "nt":
            # find Git/cmd/git.exe
            git_paths = [path for path in pbtools.whereis("git") if "cmd" in path.parts]
            if len(git_paths) > 0:
                # Git for Windows bundles its own git-lfs.exe which shadows the
                # separately installed one; try to delete the bundled copies.
                bundled_git_lfs = False
                is_admin = pbuac.isUserAdmin()
                delete_paths = []
                for git_path in git_paths:
                    # find Git from Git/cmd/git.exe
                    git_root = git_path.parents[1]
                    possible_lfs_paths = ["cmd/git-lfs.exe", "mingw64/bin/git-lfs.exe", "mingw64/libexec/git-core/git-lfs.exe"]
                    for possible_lfs_path in possible_lfs_paths:
                        path = git_root / possible_lfs_path
                        if path.exists():
                            try:
                                if is_admin:
                                    path.unlink()
                                else:
                                    # Collect for a single elevated delete later.
                                    delete_paths.append(str(path))
                            except FileNotFoundError:
                                pass
                            except OSError:
                                pblog.error(f"Git LFS is bundled with Git, overriding your installed version. Please remove {path}.")
                                bundled_git_lfs = True
                if not is_admin and len(delete_paths) > 0:
                    pblog.info("Requesting permission to delete bundled Git LFS which is overriding your installed version...")
                    quoted_paths = [f'"{path}"' for path in delete_paths]
                    delete_cmdline = ["cmd.exe", "/c", "DEL", "/q", "/f"] + quoted_paths
                    try:
                        ret = pbuac.runAsAdmin(delete_cmdline)
                    except OSError:
                        pblog.error("User declined permission. Automatic delete failed.")
                    # Re-check: anything still present means the delete failed.
                    for delete_path in delete_paths:
                        path = pathlib.Path(delete_path)
                        if path.exists():
                            bundled_git_lfs = True
                            pblog.error(f"Git LFS is bundled with Git, overriding your installed version. Please remove {path}.")
                if bundled_git_lfs:
                    pbtools.error_state()
        # Verify the installed Git LFS version matches the supported one.
        detected_lfs_version = pbgit.get_lfs_version()
        if detected_lfs_version == pbconfig.get('supported_lfs_version'):
            pblog.info(f"Current Git LFS version: {detected_lfs_version}")
        else:
            pblog.error("Git LFS is not updated to the supported version in your system")
            pblog.error(f"Supported Git LFS Version: {pbconfig.get('supported_lfs_version')}")
            pblog.error(f"Current Git LFS Version: {detected_lfs_version}")
            pblog.error("Please install the supported Git LFS version from https://git-lfs.github.com")
            if os.name == "nt":
                # Config value looks like "<name>/<version>"; take the version part.
                supported_lfs_version = pbconfig.get('supported_lfs_version').split("/")[1]
                webbrowser.open(f"https://github.com/git-lfs/git-lfs/releases/download/v{supported_lfs_version}/git-lfs-windows-v{supported_lfs_version}.exe")
            needs_git_update = True
        if needs_git_update:
            pbtools.error_state()
        pblog.info("------------------")
        # Do not execute if Unreal Editor is running
        if pbtools.get_running_process("UE4Editor") is not None:
            pbtools.error_state("Unreal Editor is currently running. Please close it before running PBSync. It may be listed only in Task Manager as a background process. As a last resort, you should log off and log in again.")
        current_branch = pbgit.get_current_branch_name()
        # repo was already fetched in StartProject.bat
        if current_branch != "promoted":
            pblog.info("Fetching recent changes on the repository...")
            fetch_base = ["git", "fetch", "origin"]
            branches = {"promoted", "master", "trunk", current_branch}
            fetch_base.extend(branches)
            pbtools.get_combined_output(fetch_base)
        # Do some housekeeping for git configuration
        pbgit.setup_config()
        # Check if we have correct credentials
        pbgit.check_credentials()
        pblog.info("------------------")
        # Execute synchronization part of script if we're on the expected branch, or force sync is enabled
        is_on_expected_branch = pbgit.compare_with_current_branch_name(pbconfig.get('expected_branch_name'))
        if sync_val == "force" or is_on_expected_branch:
            pbtools.resolve_conflicts_and_pull()
            pblog.info("------------------")
            project_version = pbunreal.get_project_version()
            if project_version is not None:
                pblog.info(f"Current project version: {project_version}")
            else:
                pbtools.error_state(
                    "Something went wrong while fetching project version. Please request help in #tech-support.")
            if pbhub.is_pull_binaries_required():
                pblog.info("Binaries are not up to date, trying to pull new binaries...")
                ret = pbhub.pull_binaries(project_version)
                # pull_binaries: 0 = ok, <0 = user-fixable failure, >0 = hard error
                if ret == 0:
                    pblog.info("Binaries were pulled successfully")
                elif ret < 0:
                    pbtools.error_state("Binaries pull failed, please view log for instructions.")
                elif ret > 0:
                    pbtools.error_state("An error occurred while pulling binaries. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved.", True)
            else:
                pblog.info("Binaries are up-to-date")
        else:
            pblog.warning(f"Current branch is not supported for repository synchronization: {pbgit.get_current_branch_name()}. Auto synchronization "
                          "will be disabled")
        pblog.info("------------------")
        pblog.info("Checking for engine updates...")
        if pbgit.sync_file("ProjectBorealis.uproject") != 0:
            pbtools.error_state(
                "Something went wrong while updating the .uproject file. Please request help in #tech-support.")
        engine_version = pbunreal.get_engine_version(False)
        pblog.info("Trying to register current engine build if it exists. Otherwise, the build will be downloaded...")
        symbols_needed = pbunreal.is_versionator_symbols_enabled()
        bundle_name = pbconfig.get("ue4v_default_bundle")
        if pbunreal.run_ue4versionator(bundle_name, symbols_needed) != 0:
            pblog.error(f"Something went wrong while registering engine build {bundle_name}-{engine_version}. Please request help in #tech-support.")
            sys.exit(1)
        else:
            pblog.info(f"Engine build {bundle_name}-{engine_version} successfully registered")
        # Clean old engine installations, do that only in expected branch
        if is_on_expected_branch:
            if pbunreal.clean_old_engine_installations():
                pblog.info("Old engine installations are successfully cleaned")
            else:
                pblog.warning("Something went wrong while cleaning old engine installations. You may want to clean them manually.")
        pblog.info("------------------")
        if pbunreal.check_ue4_file_association():
            try:
                # Launch the project; os.startfile is Windows-only.
                os.startfile(os.path.normpath(os.path.join(os.getcwd(), "ProjectBorealis.uproject")))
            except NotImplementedError:
                pblog.info("You may now launch ProjectBorealis.uproject with Unreal Engine 4.")
        else:
            pbtools.error_state(".uproject extension is not correctly set into Unreal Engine. Make sure you have Epic Games Launcher installed. If problem still persists, please get help in #tech-support.")
    elif sync_val == "engineversion":
        if repository_val is None:
            pblog.error("--repository <URL> argument should be provided with --sync engine command")
            sys.exit(1)
        engine_version = pbunreal.get_latest_available_engine_version(str(repository_val))
        if engine_version is None:
            pblog.error("Error while trying to fetch latest engine version")
            sys.exit(1)
        if not pbunreal.set_engine_version(engine_version):
            pblog.error("Error while trying to update engine version in .uproject file")
            sys.exit(1)
        pblog.info(f"Successfully changed engine version as {str(engine_version)}")
    elif sync_val == "ddc":
        pbunreal.generate_ddc_data()
    elif sync_val == "binaries":
        project_version = pbunreal.get_project_version()
        ret = pbhub.pull_binaries(project_version, True)
        if ret == 0:
            pblog.info(f"Binaries for {project_version} pulled & extracted successfully")
        else:
            pblog.error(f"Failed to pull binaries for {project_version}")
            sys.exit(1)
    elif sync_val == "engine":
        # Pull engine build with ue4versionator & register it
        if requested_bundle_name is None:
            requested_bundle_name = pbconfig.get("ue4v_default_bundle")
        engine_version = pbunreal.get_engine_version(False)
        if pbunreal.run_ue4versionator(requested_bundle_name) != 0:
            pblog.error(f"Something went wrong while registering engine build {requested_bundle_name}-{engine_version}")
            sys.exit(1)
        else:
            pblog.info(f"Engine build {requested_bundle_name}-{engine_version} successfully registered")
def autoversion_handler(autoversion_val):
    """Bump the project version ("hotfix"/"stable"/"public").

    Exits with status 1 when the version increase fails.
    """
    if not pbunreal.project_version_increase(autoversion_val):
        pblog.error("Error occurred while trying to increase project version")
        sys.exit(1)
    pblog.info("Successfully increased project version")
def pull_binaries(version_number: str, pass_checksum=False):
    """Download & extract the binary package of a GitHub release via hub.

    version_number: release tag to pull binaries for.
    pass_checksum: when True, skip MD5 verification of the package and of the
        extracted files.
    Returns 0 on success, 1 on a hard error, -1 on a user-recoverable error.
    """
    if not os.path.isfile(hub_executable_path):
        pblog.error(f"Hub executable is not found at {hub_executable_path}")
        return 1
    # Backward compatibility with old PBGet junctions. If it still exists, remove the junction
    if pbtools.is_junction("Binaries") and not pbtools.remove_junction("Binaries"):
        pblog.error("Something went wrong while removing junction for 'Binaries' folder. You should remove that folder manually to solve the problem")
        return -1
    # Remove binary package if it exists, hub is not able to overwrite existing files
    if os.path.exists(binary_package_name):
        try:
            os.remove(binary_package_name)
        except Exception as e:
            pblog.exception(str(e))
            pblog.error(f"Exception thrown while trying to remove {binary_package_name}. Please remove it manually")
            return -1
    if not os.path.isfile(hub_config_path):
        pblog.info("You will now be asked to log in to your GitHub account. Please note that for security reasons, your password will not be shown as you type it.")
        # If user didn't login with hub yet, do it now for once
        output = pbtools.run([hub_executable_path, "release", "-L", "1"])
        if not os.path.isfile(hub_config_path):
            # NOTE(review): this retries indefinitely while the login keeps
            # failing — confirm that is intended.
            pblog.error("Failed to login into hub with git credentials. Please check if your provided credentials are valid.")
            return pull_binaries(version_number, pass_checksum)
        else:
            pblog.info("Login to hub API was successful")
    try:
        # hub prints status to its combined output; classify it below.
        output = pbtools.get_combined_output([hub_executable_path, "release", "download", version_number, "-i", binary_package_name])
        if f"Downloading {binary_package_name}" in output:
            pass
        elif "Unable to find release with tag name" in output:
            pblog.error(f"Failed to find release tag {version_number}. Please wait and try again later.")
            return -1
        elif "The file exists" in output:
            pblog.error(f"File {binary_package_name} was not able to be overwritten. Please remove it manually and run StartProject again.")
            return -1
        elif "did not match any available assets" in output:
            # Fix: this message was missing the f prefix, so "{version_number}"
            # was printed literally instead of the release tag.
            pblog.error(f"Binaries for release {version_number} are not pushed into GitHub yet. Please wait and try again later.")
            return -1
        elif not output:
            # hub doesn't print any output if package doesn't exist in release
            pblog.error(f"Failed to find binary package for release {version_number}")
            return 1
        else:
            pblog.error(f"Unknown error occurred while pulling binaries for release {version_number}")
            pblog.error(f"Command output was: {output}")
            return 1
    except Exception as e:
        pblog.exception(str(e))
        pblog.error(
            f"Exception thrown while trying do pull binaries for {version_number}")
        return 1
    # Temp fix for Binaries folder with unnecessary content
    if os.path.isdir("Binaries"):
        try:
            shutil.rmtree("Binaries")
        except Exception as e:
            pblog.exception(str(e))
            pblog.error("Exception thrown while trying do clean Binaries folder")
            return 1
    try:
        if pass_checksum:
            checksum_json_path = None
        else:
            checksum_json_path = pbconfig.get("checksum_file")
            if not os.path.exists(checksum_json_path):
                pblog.error(f"Checksum json file is not found at {checksum_json_path}")
                return 1
            # Verify the downloaded package before extracting it.
            if not pbtools.compare_md5_single(binary_package_name, checksum_json_path):
                return 1
        with ZipFile(binary_package_name) as zip_file:
            zip_file.extractall()
        if pass_checksum:
            return 0
        # Verify every extracted file against the checksum list.
        elif not pbtools.compare_md5_all(checksum_json_path, True):
            return 1
    except Exception as e:
        pblog.exception(str(e))
        pblog.error(f"Exception thrown while trying do extract binary package for {version_number}")
        return 1
    return 0