def compare_md5_all(md5_json_file_path, print_log=False, ignored_extension=".zip"):
    """Verify the MD5 of every file listed in a checksum manifest.

    Args:
        md5_json_file_path: Path to a JSON file mapping file paths to expected MD5 hex digests.
        print_log: When True, log per-file success/failure details.
        ignored_extension: Files whose path ends with this extension are skipped.

    Returns:
        True if every (non-ignored) file exists and matches its expected hash,
        False otherwise. Returns False immediately if the manifest is missing/empty
        or a checked file does not exist.
    """
    hash_dict = get_dict_from_json(md5_json_file_path)
    if hash_dict is None or len(hash_dict) == 0:
        return False
    is_success = True
    for file_path, expected_md5 in hash_dict.items():
        # FIX: skip ignored files *before* the existence check, so a missing
        # ignored file (e.g. an already-removed .zip package) no longer fails
        # the whole verification run.
        # FIX: use endswith() instead of a substring test, which also matched
        # paths merely containing ".zip" somewhere in a directory name.
        if file_path.endswith(ignored_extension):
            continue
        if not os.path.isfile(file_path):
            # A missing file means the checksum fails outright.
            if print_log:
                pblog.error(f"MD5 checksum failed for {file_path}")
                pblog.error("File does not exist")
            return False
        current_md5 = get_md5_hash(file_path)
        if expected_md5 == current_md5:
            if print_log:
                pblog.info(f"MD5 checksum successful for {file_path}")
        else:
            if print_log:
                pblog.error(f"MD5 checksum failed for {file_path}")
                pblog.error(f"Expected MD5: {expected_md5}")
                pblog.error(f"Current MD5: {str(current_md5)}")
            is_success = False
    return is_success
def error_state(msg=None, fatal_error=False):
    """Report an error, optionally mark it as fatal, and terminate PBSync.

    Args:
        msg: Optional message to log before exiting.
        fatal_error: When True, persist an error marker so PBSync refuses to
            run again until the problem is fixed (marker presumably checked at
            startup elsewhere — defined outside this view).

    Always exits the process with status 1.
    """
    if msg is not None:
        pblog.error(msg)
    if fatal_error:
        # Fatal: write the "1" marker that blocks the user from running
        # PBSync until the underlying issue is resolved.
        with open(error_file, 'w') as marker_file:
            marker_file.write("1")
    pblog.info(f"Logs are saved in {pbconfig.get('log_file_path')}.")
    sys.exit(1)
def clean_handler(clean_val):
    """Dispatch the --clean command.

    Args:
        clean_val: Either "workspace" (wipe the git workspace) or
            "engine" (remove old engine installations).

    Exits with status 1 if the requested cleanup fails.
    """
    if clean_val == "engine":
        if not pbunreal.clean_old_engine_installations():
            pblog.error(
                "Something went wrong while cleaning old engine installations. You may want to clean them manually.")
            sys.exit(1)
    elif clean_val == "workspace":
        if not pbtools.wipe_workspace():
            pblog.error("Something went wrong while wiping the workspace")
            sys.exit(1)
        pblog.info("Workspace wipe successful")
def push_build(branch_type, dispath_exec_path, dispatch_config, dispatch_stagedir, dispatch_apply_drm_path):
    """Push and publish a build through the Dispatch CLI.

    Args:
        branch_type: "internal" is the only accepted value; "playtester" is
            recognized but currently rejected.
        dispath_exec_path: Path to the dispatch executable.
            NOTE(review): parameter name looks like a typo of "dispatch_exec_path",
            but it is part of the public interface — do not rename without
            updating callers.
        dispatch_config: Dispatch build configuration argument.
        dispatch_stagedir: Staging directory passed to "build push".
        dispatch_apply_drm_path: Directory searched for the game .exe to DRM-wrap.

    Returns:
        True on a successful push & publish, False on any validation or
        subprocess failure.
    """
    # Test if our environment variables exist
    app_id = os.environ.get('DISPATCH_APP_ID')
    if app_id is None or app_id == "":
        pblog.error("DISPATCH_APP_ID was not defined in the system environment.")
        return False
    if branch_type == "internal":
        branch_id_env = 'DISPATCH_INTERNAL_BID'
    elif branch_type == "playtester":
        branch_id_env = 'DISPATCH_PLAYTESTER_BID'
        # Playtester pushes are deliberately disabled for now.
        pblog.error("Playtester builds are not allowed at the moment.")
        return False
    else:
        pblog.error("Unknown Dispatch branch type specified.")
        return False
    branch_id = os.environ.get(branch_id_env)
    if branch_id is None or branch_id == "":
        pblog.error(f"{branch_id_env} was not defined in the system environment.")
        return False
    # Find an executable to wrap; note there is no break, so if several .exe
    # files exist the LAST one listed wins.
    executable_path = None
    for file in os.listdir(dispatch_apply_drm_path):
        if file.endswith(".exe"):
            executable_path = os.path.join(dispatch_apply_drm_path, str(file))
    if executable_path is None:
        pblog.error(f"Executable {dispatch_apply_drm_path} not found while attempting to apply DRM wrapper.")
        return False
    if os.path.getsize(executable_path) > exec_max_allowed_size:
        # Oversized exe: fall back to the default executable three directories
        # up from the DRM path (presumably the project root — confirm layout).
        executable_path = dispatch_apply_drm_path
        for i in range(3):
            executable_path = os.path.join(executable_path, "..")
        executable_path = os.path.abspath(executable_path)
        executable_path = os.path.join(executable_path, default_drm_exec_name)
    # Wrap executable with DRM
    # NOTE(review): this step is intentionally disabled (`if False:`); the
    # executable discovery above currently only serves as validation.
    if False:
        proc = pbtools.run_with_combined_output([dispath_exec_path, "build", "drm-wrap", app_id, executable_path])
        pblog.info(proc.stdout)
        result = proc.returncode
        if result != 0:
            return False
    # Push & Publish the build
    proc = pbtools.run_with_combined_output([dispath_exec_path, "build", "push", branch_id, dispatch_config, dispatch_stagedir, "-p"])
    pblog.info(proc.stdout)
    result = proc.returncode
    return result == 0
def check_remote_connection():
    """Ensure origin points at the configured URL and test reachability.

    If the current origin URL differs from the configured `git_url`, origin is
    re-pointed at the configured URL first.

    Returns:
        A `(reachable, current_url)` tuple where `reachable` is True when
        `git ls-remote` exits with code 0.
    """
    current_url = pbtools.get_one_line_output(
        ["git", "remote", "get-url", "origin"])
    expected_url = pbconfig.get("git_url")
    if current_url != expected_url:
        # Repoint origin to the configured URL, then re-read it.
        set_url_output = pbtools.get_combined_output(
            ["git", "remote", "set-url", "origin", expected_url])
        pblog.info(set_url_output)
        current_url = pbtools.get_one_line_output(
            ["git", "remote", "get-url", "origin"])
    # --exit-code makes ls-remote fail when no matching heads are found.
    exit_code = pbtools.run_with_output(
        ["git", "ls-remote", "--exit-code", "-h"]).returncode
    return exit_code == 0, current_url
def stash_pop():
    """Pop the git stash and classify the outcome from the command output.

    Known-benign outcomes ("dropped refs", "no stash entries found") return
    normally; merge-conflict or unknown outcomes escalate through
    `pbtools.error_state` with `fatal_error=True` (which exits the process).
    """
    pblog.info("Trying to pop stash...")
    raw_output = pbtools.get_combined_output(["git", "stash", "pop"])
    pblog.info(raw_output)
    output = raw_output.lower()
    # All three markers together indicate a pointer-corrupting conflict.
    had_conflict = ("auto-merging" in output
                    and "conflict" in output
                    and "should have been pointers" in output)
    if had_conflict:
        pbtools.error_state(
            """git stash pop failed. Some of your stashed local changes would be overwritten by incoming changes. Request help in #tech-support to resolve conflicts, and please do not run StartProject.bat until the issue is resolved.""",
            True)
    elif "dropped refs" in output or "no stash entries found" in output:
        # Pop succeeded, or there was nothing stashed — both are fine.
        return
    else:
        pbtools.error_state(
            """git stash pop failed due to an unknown error. Request help in #tech-support to resolve possible conflicts, and please do not run StartProject.bat until the issue is resolved.""",
            True)
def compare_md5_single(compared_file_path, md5_json_file_path):
    """Verify the MD5 of a single file against a checksum manifest.

    Args:
        compared_file_path: Path of the file to hash and verify.
        md5_json_file_path: JSON manifest mapping ".\\"-prefixed paths to MD5 digests.

    Returns:
        True if the file's hash matches the manifest entry, False otherwise
        (including when the file can't be hashed or the key is missing).
    """
    current_hash = get_md5_hash(compared_file_path)
    if current_hash is None:
        return False
    # Manifest keys carry a leading ".\" prefix.
    dict_search_string = f".\\{compared_file_path}"
    hash_dict = get_dict_from_json(md5_json_file_path)
    if hash_dict is None or not (dict_search_string in hash_dict):
        pblog.error(
            f"Key {dict_search_string} not found in {md5_json_file_path}")
        return False
    if hash_dict[dict_search_string] == current_hash:
        pblog.info(f"MD5 checksum successful for {compared_file_path}")
        return True
    else:
        pblog.error(f"MD5 checksum failed for {compared_file_path}")
        # FIX: previously indexed hash_dict[compared_file_path], which raised
        # KeyError (keys are ".\"-prefixed) instead of logging the mismatch.
        pblog.error(f"Expected MD5: {hash_dict[dict_search_string]}")
        pblog.error(f"Current MD5: {str(current_hash)}")
        return False
def generate_ddc_data():
    """Generate Unreal Derived Data Cache (DDC) data with the installed editor.

    Locates the currently registered engine installation, runs the editor's
    DerivedDataCache fill commandlet on the project, and verifies the DDC
    folder was created. Any failure to resolve the engine or verify the DDC
    folder escalates through `pbtools.error_state`.
    """
    pblog.info(
        "Generating DDC data, please wait... (This may take up to one hour only for the initial run)"
    )
    current_version = get_engine_version_with_prefix()
    if current_version is not None:
        engine_install_root = get_engine_install_root()
        installation_dir = os.path.join(engine_install_root, current_version)
        if os.path.isdir(installation_dir):
            ue_editor_executable = os.path.join(installation_dir, ue4_editor_relative_path)
            if os.path.isfile(ue_editor_executable):
                # NOTE(review): a list argv combined with shell=True is
                # Windows-specific behavior; on POSIX only the first element
                # would reach the shell — presumably this only runs on Windows.
                err = subprocess.run([
                    str(ue_editor_executable),
                    os.path.join(os.getcwd(), pbconfig.get('uproject_name')),
                    "-run=DerivedDataCache",
                    "-fill"
                ], shell=True).returncode
                if err == 0:
                    pblog.info(f"DDC generate command has exited with {err}")
                else:
                    # Non-zero exit is logged but not fatal by itself; the
                    # folder check below decides success.
                    pblog.error(f"DDC generate command has exited with {err}")
                if not check_ddc_folder_created():
                    pbtools.error_state(
                        "DDC folder doesn't exist. Please get support from #tech-support"
                    )
                    return
                pblog.info("DDC data successfully generated!")
                return
    # Reached when the engine version/installation/editor could not be resolved.
    pbtools.error_state(
        "Error occurred while trying to read project version for DDC data generation. Please get support from #tech-support"
    )
def wipe_workspace():
    """Hard-reset the workspace to origin/<current branch> after confirmation.

    Prompts the user; on anything other than "y"/"Y" the wipe is cancelled.
    Aborts in-progress git operations, disables watchman, fetches, hard-resets,
    cleans untracked files, then pulls.

    Returns:
        True when the hard reset exited with code 0, False otherwise
        (including user cancellation).
    """
    current_branch = pbgit.get_current_branch_name()
    answer = input(
        f"This command will wipe your workspace and get latest changes from {current_branch}. Are you sure? [y/N]"
    )
    if answer not in ("y", "Y"):
        return False
    pbgit.abort_all()
    disable_watchman()
    pblog.info(get_combined_output(["git", "fetch", "origin", current_branch]))
    reset_proc = run_with_combined_output(
        ["git", "reset", "--hard", f"origin/{current_branch}"])
    pblog.info(reset_proc.stdout)
    pblog.info(get_combined_output(["git", "clean", "-fd"]))
    pblog.info(get_combined_output(["git", "pull"]))
    # Success is judged by the hard reset alone; clean/pull output is only logged.
    return reset_proc.returncode == 0
def set_tracking_information(upstream_branch_name: str):
    """Point the local branch at origin/<upstream_branch_name> as its upstream.

    Args:
        upstream_branch_name: Branch name used both as the local branch and as
            the origin branch to track.
    """
    cmd = [
        "git",
        "branch",
        f"--set-upstream-to=origin/{upstream_branch_name}",
        upstream_branch_name,
    ]
    pblog.info(pbtools.get_combined_output(cmd))
def push_handler(file_name):
    """Attach a file to the GitHub release matching the current project version.

    Args:
        file_name: Path of the package to upload.

    Exits with status 1 if the upload fails.
    """
    project_version = pbunreal.get_project_version()
    pblog.info(f"Attaching {file_name} into GitHub release {project_version}")
    pushed = pbhub.push_package(project_version, file_name)
    if not pushed:
        pblog.error(f"Error occurred while pushing package for release {project_version}")
        sys.exit(1)
def sync_handler(sync_val: str, repository_val=None, requested_bundle_name=None):
    """Entry point for the --sync command.

    Args:
        sync_val: One of "all"/"force" (full repo + engine sync), "engineversion"
            (bump .uproject engine version from a repository), "ddc" (generate
            DDC data), "binaries" (pull binary package), or "engine" (pull &
            register an engine build).
        repository_val: Repository URL; required only for "engineversion".
        requested_bundle_name: ue4versionator bundle; only used by "engine",
            defaults to the configured "ue4v_default_bundle".

    Fatal problems escalate through pbtools.error_state / sys.exit(1).
    """
    sync_val = sync_val.lower()
    if sync_val == "all" or sync_val == "force":
        # Firstly, check our remote connection before doing anything
        remote_state, remote_url = pbgit.check_remote_connection()
        if not remote_state:
            pbtools.error_state(
                f"Remote connection was not successful. Please verify that you have a valid git remote URL & internet connection. Current git remote URL: {remote_url}")
        else:
            pblog.info("Remote connection is up")
        pblog.info("------------------")
        pblog.info(f"Executing {sync_val} sync command")
        pblog.info(f"PBpy Library Version: {pbpy_version.ver}")
        pblog.info(f"PBSync Program Version: {pbsync_version.ver}")
        pblog.info("------------------")
        # --- Git version check: mismatch only records the need to update; the
        # actual abort happens after the LFS check below.
        detected_git_version = pbgit.get_git_version()
        needs_git_update = False
        if detected_git_version == pbconfig.get('supported_git_version'):
            pblog.info(f"Current Git version: {detected_git_version}")
        else:
            pblog.error("Git is not updated to the supported version in your system")
            pblog.error(f"Supported Git Version: {pbconfig.get('supported_git_version')}")
            pblog.error(f"Current Git Version: {detected_git_version}")
            pblog.error("Please install the supported Git version from https://github.com/microsoft/git/releases")
            pblog.error("Visit https://github.com/ProjectBorealisTeam/pb/wiki/Prerequisites for installation instructions")
            if os.name == "nt":
                # Open the installer download for the supported version.
                webbrowser.open(f"https://github.com/microsoft/git/releases/download/v{pbconfig.get('supported_git_version')}/Git-{pbconfig.get('supported_git_version')}-64-bit.exe")
            needs_git_update = True
        # --- Windows only: remove the Git-bundled git-lfs.exe, which shadows
        # the separately installed Git LFS.
        if os.name == "nt":
            # find Git/cmd/git.exe
            git_paths = [path for path in pbtools.whereis("git") if "cmd" in path.parts]
            if len(git_paths) > 0:
                bundled_git_lfs = False
                is_admin = pbuac.isUserAdmin()
                delete_paths = []
                for git_path in git_paths:
                    # find Git from Git/cmd/git.exe
                    git_root = git_path.parents[1]
                    possible_lfs_paths = ["cmd/git-lfs.exe", "mingw64/bin/git-lfs.exe", "mingw64/libexec/git-core/git-lfs.exe"]
                    for possible_lfs_path in possible_lfs_paths:
                        path = git_root / possible_lfs_path
                        if path.exists():
                            try:
                                if is_admin:
                                    # Delete directly when elevated.
                                    path.unlink()
                                else:
                                    # Collect for a single elevated delete below.
                                    delete_paths.append(str(path))
                            except FileNotFoundError:
                                pass
                            except OSError:
                                pblog.error(f"Git LFS is bundled with Git, overriding your installed version. Please remove {path}.")
                                bundled_git_lfs = True
                if not is_admin and len(delete_paths) > 0:
                    pblog.info("Requesting permission to delete bundled Git LFS which is overriding your installed version...")
                    quoted_paths = [f'"{path}"' for path in delete_paths]
                    delete_cmdline = ["cmd.exe", "/c", "DEL", "/q", "/f"] + quoted_paths
                    try:
                        ret = pbuac.runAsAdmin(delete_cmdline)
                    except OSError:
                        pblog.error("User declined permission. Automatic delete failed.")
                    # Verify the elevated delete actually removed the files.
                    for delete_path in delete_paths:
                        path = pathlib.Path(delete_path)
                        if path.exists():
                            bundled_git_lfs = True
                            pblog.error(f"Git LFS is bundled with Git, overriding your installed version. Please remove {path}.")
                if bundled_git_lfs:
                    pbtools.error_state()
        # --- Git LFS version check, mirroring the Git check above.
        detected_lfs_version = pbgit.get_lfs_version()
        if detected_lfs_version == pbconfig.get('supported_lfs_version'):
            pblog.info(f"Current Git LFS version: {detected_lfs_version}")
        else:
            pblog.error("Git LFS is not updated to the supported version in your system")
            pblog.error(f"Supported Git LFS Version: {pbconfig.get('supported_lfs_version')}")
            pblog.error(f"Current Git LFS Version: {detected_lfs_version}")
            pblog.error("Please install the supported Git LFS version from https://git-lfs.github.com")
            if os.name == "nt":
                # Config stores something like "git-lfs/x.y.z"; take the bare version.
                supported_lfs_version = pbconfig.get('supported_lfs_version').split("/")[1]
                webbrowser.open(f"https://github.com/git-lfs/git-lfs/releases/download/v{supported_lfs_version}/git-lfs-windows-v{supported_lfs_version}.exe")
            needs_git_update = True
        if needs_git_update:
            pbtools.error_state()
        pblog.info("------------------")
        # Do not execute if Unreal Editor is running
        if pbtools.get_running_process("UE4Editor") is not None:
            pbtools.error_state("Unreal Editor is currently running. Please close it before running PBSync. It may be listed only in Task Manager as a background process. As a last resort, you should log off and log in again.")
        current_branch = pbgit.get_current_branch_name()
        # repo was already fetched in StartProject.bat
        if current_branch != "promoted":
            pblog.info("Fetching recent changes on the repository...")
            fetch_base = ["git", "fetch", "origin"]
            # Set used for de-duplication in case current_branch is one of the fixed names.
            branches = {"promoted", "master", "trunk", current_branch}
            fetch_base.extend(branches)
            pbtools.get_combined_output(fetch_base)
        # Do some housekeeping for git configuration
        pbgit.setup_config()
        # Check if we have correct credentials
        pbgit.check_credentials()
        pblog.info("------------------")
        # Execute synchronization part of script if we're on the expected branch, or force sync is enabled
        is_on_expected_branch = pbgit.compare_with_current_branch_name(pbconfig.get('expected_branch_name'))
        if sync_val == "force" or is_on_expected_branch:
            pbtools.resolve_conflicts_and_pull()
            pblog.info("------------------")
            project_version = pbunreal.get_project_version()
            if project_version is not None:
                pblog.info(f"Current project version: {project_version}")
            else:
                pbtools.error_state(
                    "Something went wrong while fetching project version. Please request help in #tech-support.")
            if pbhub.is_pull_binaries_required():
                pblog.info("Binaries are not up to date, trying to pull new binaries...")
                ret = pbhub.pull_binaries(project_version)
                # pull_binaries return codes: 0 success, negative = recoverable
                # failure (non-fatal), positive = fatal error.
                if ret == 0:
                    pblog.info("Binaries were pulled successfully")
                elif ret < 0:
                    pbtools.error_state("Binaries pull failed, please view log for instructions.")
                elif ret > 0:
                    pbtools.error_state("An error occurred while pulling binaries. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved.", True)
            else:
                pblog.info("Binaries are up-to-date")
        else:
            pblog.warning(f"Current branch is not supported for repository synchronization: {pbgit.get_current_branch_name()}. Auto synchronization "
                          "will be disabled")
        pblog.info("------------------")
        pblog.info("Checking for engine updates...")
        if pbgit.sync_file("ProjectBorealis.uproject") != 0:
            pbtools.error_state(
                "Something went wrong while updating the .uproject file. Please request help in #tech-support.")
        engine_version = pbunreal.get_engine_version(False)
        pblog.info("Trying to register current engine build if it exists. Otherwise, the build will be downloaded...")
        symbols_needed = pbunreal.is_versionator_symbols_enabled()
        bundle_name = pbconfig.get("ue4v_default_bundle")
        if pbunreal.run_ue4versionator(bundle_name, symbols_needed) != 0:
            pblog.error(f"Something went wrong while registering engine build {bundle_name}-{engine_version}. Please request help in #tech-support.")
            sys.exit(1)
        else:
            pblog.info(f"Engine build {bundle_name}-{engine_version} successfully registered")
        # Clean old engine installations, do that only in expected branch
        if is_on_expected_branch:
            if pbunreal.clean_old_engine_installations():
                pblog.info("Old engine installations are successfully cleaned")
            else:
                pblog.warning("Something went wrong while cleaning old engine installations. You may want to clean them manually.")
        pblog.info("------------------")
        if pbunreal.check_ue4_file_association():
            try:
                # Launch the project via the OS file association (Windows only;
                # os.startfile raises NotImplementedError elsewhere).
                os.startfile(os.path.normpath(os.path.join(os.getcwd(), "ProjectBorealis.uproject")))
            except NotImplementedError:
                pblog.info("You may now launch ProjectBorealis.uproject with Unreal Engine 4.")
        else:
            pbtools.error_state(".uproject extension is not correctly set into Unreal Engine. Make sure you have Epic Games Launcher installed. If problem still persists, please get help in #tech-support.")
    elif sync_val == "engineversion":
        if repository_val is None:
            pblog.error("--repository <URL> argument should be provided with --sync engine command")
            sys.exit(1)
        engine_version = pbunreal.get_latest_available_engine_version(str(repository_val))
        if engine_version is None:
            pblog.error("Error while trying to fetch latest engine version")
            sys.exit(1)
        if not pbunreal.set_engine_version(engine_version):
            pblog.error("Error while trying to update engine version in .uproject file")
            sys.exit(1)
        pblog.info(f"Successfully changed engine version as {str(engine_version)}")
    elif sync_val == "ddc":
        pbunreal.generate_ddc_data()
    elif sync_val == "binaries":
        project_version = pbunreal.get_project_version()
        ret = pbhub.pull_binaries(project_version, True)
        if ret == 0:
            pblog.info(f"Binaries for {project_version} pulled & extracted successfully")
        else:
            pblog.error(f"Failed to pull binaries for {project_version}")
            sys.exit(1)
    elif sync_val == "engine":
        # Pull engine build with ue4versionator & register it
        if requested_bundle_name is None:
            requested_bundle_name = pbconfig.get("ue4v_default_bundle")
        engine_version = pbunreal.get_engine_version(False)
        if pbunreal.run_ue4versionator(requested_bundle_name) != 0:
            pblog.error(f"Something went wrong while registering engine build {requested_bundle_name}-{engine_version}")
            sys.exit(1)
        else:
            pblog.info(f"Engine build {requested_bundle_name}-{engine_version} successfully registered")
def autoversion_handler(autoversion_val):
    """Increase the project version by the requested increment.

    Args:
        autoversion_val: Increment selector forwarded to
            pbunreal.project_version_increase.

    Exits with status 1 if the version bump fails.
    """
    if not pbunreal.project_version_increase(autoversion_val):
        pblog.error("Error occurred while trying to increase project version")
        sys.exit(1)
    pblog.info("Successfully increased project version")
def resolve_conflicts_and_pull(retry_count=0, max_retries=1):
    """Stash, rebase onto the remote, and classify the outcome from git output.

    Args:
        retry_count: Current retry attempt; non-zero adds an exponential delay
            and is incremented on the self-recursive retry paths below.
        max_retries: Ceiling consulted by should_attempt_auto_resolve(); the
            retry branches re-invoke this function with their own max.

    Fatal or un-resolvable states exit through error_state(fatal_error=True).
    On success, LFS content is pulled and (if no stash remains) LFS pruning is
    kicked off in a detached background process.
    """
    def should_attempt_auto_resolve():
        # Captures the *current call's* retry_count at call time.
        return retry_count <= max_retries
    if retry_count:
        # wait a little bit if retrying (exponential)
        time.sleep(0.25 * (1 << retry_count))
    # Disable watchman for now
    disable_watchman()
    out = get_combined_output(["git", "status", "--ahead-behind"])
    pblog.info(out)
    # Only pull when the local branch is NOT ahead of its upstream; an "ahead"
    # status skips straight to the success path without touching the worktree.
    if "ahead" not in out:
        pblog.info(
            "Please wait while getting the latest changes from the repository. It may take a while..."
        )
        # Make sure upstream is tracked correctly
        pbgit.set_tracking_information(pbgit.get_current_branch_name())
        pblog.info("Trying to stash local work...")
        proc = run_with_combined_output(["git", "stash"])
        out = proc.stdout
        # "Saved working directory..." distinguishes a real stash from
        # "No local changes to save" (also exit code 0).
        stashed = proc.returncode == 0 and "Saved working directory and index state" in out
        pblog.info(out)
        pblog.info(
            "Trying to rebase workspace with the latest changes from the repository..."
        )
        result = run_with_combined_output(
            ["git", "pull", "--rebase", "--no-autostash"])
        # TODO: autostash handling
        # pblog.info("Trying to rebase workspace with latest changes on the repository...")
        # result = run_with_combined_output(["git", "pull", "--rebase", "--autostash"])
        code = result.returncode
        out = result.stdout
        pblog.info(out)
        # All branch matching below is done on the lower-cased pull output.
        out = out.lower()
        error = code != 0
    else:
        stashed = False
        error = False
    def pop_if_stashed():
        # Restore stashed local work, if any was stashed above.
        if stashed:
            pbgit.stash_pop()
    def handle_success():
        pop_if_stashed()
        # ensure we pull LFS
        run(["git", "lfs", "pull"])
        pblog.success(
            "Success! You are now on the latest changes without any conflicts."
        )
    def handle_error(msg=None):
        # Abort any in-progress rebase/merge, restore the stash, then bail fatally.
        pbgit.abort_all()
        pop_if_stashed()
        error_state(msg, fatal_error=True)
    if not error:
        handle_success()
    elif "fast-forwarded" in out:
        handle_success()
    elif "up to date" in out:
        handle_success()
    elif "rewinding head" in out and not ("error" in out or "conflict" in out):
        handle_success()
    elif "successfully rebased and updated" in out:
        handle_success()
    elif "failed to merge in the changes" in out or "could not apply" in out:
        handle_error(
            "Aborting the rebase. Changes on one of your commits will be overridden by incoming changes. Please request help in #tech-support to resolve conflicts, and please do not run StartProject.bat until the issue is resolved."
        )
    elif "unmerged files" in out or "merge_head exists" in out:
        # we can't abort anything, but don't let stash linger to restore the original repo state
        pop_if_stashed()
        error_state(
            "You are in the middle of a merge. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved.",
            fatal_error=True)
    elif "unborn" in out:
        if should_attempt_auto_resolve():
            pblog.error("Unborn branch detected. Retrying...")
            retry_count += 1
            resolve_conflicts_and_pull(retry_count)
            return
        else:
            handle_error(
                "You are on an unborn branch. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved."
            )
    elif "no remote" in out or "no such remote" in out or "refspecs without repo" in out:
        if should_attempt_auto_resolve():
            pblog.error("Remote repository not found. Retrying...")
            retry_count += 1
            # Retries this class of failure with a higher ceiling (2).
            resolve_conflicts_and_pull(retry_count, 2)
            return
        else:
            handle_error(
                "The remote repository could not be found. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved."
            )
    elif "cannot open" in out:
        if should_attempt_auto_resolve():
            pblog.error("Git file info could not be read. Retrying...")
            retry_count += 1
            # Retries this class of failure with a higher ceiling (3).
            resolve_conflicts_and_pull(retry_count, 3)
            return
        else:
            handle_error(
                "Git file info could not be read. Please request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved."
            )
    else:
        # We have no idea what the state of the repo is. Do nothing except bail.
        error_state(
            "Aborting the repo update because of an unknown error. Request help in #tech-support to resolve it, and please do not run StartProject.bat until the issue is resolved.",
            fatal_error=True)
    # only prune if we don't have a stash
    out = get_combined_output(["git", "stash", "list"])
    if len(out) < 3:
        if os.name == "nt":
            subprocess.Popen("git lfs prune -c ; git lfs dedup", shell=True, creationflags=subprocess.DETACHED_PROCESS)
        elif os.name == "posix":
            subprocess.Popen("nohup git lfs prune -c || nohup git lfs dedup", shell=True)
def pull_binaries(version_number: str, pass_checksum=False):
    """Download and extract the binary package for a GitHub release via hub.

    Args:
        version_number: Release tag to download the binary package from.
        pass_checksum: When True, skip all MD5 checksum verification.

    Returns:
        0 on success, a negative value for recoverable failures (caller may
        retry later), and a positive value for fatal errors.
    """
    if not os.path.isfile(hub_executable_path):
        pblog.error(f"Hub executable is not found at {hub_executable_path}")
        return 1
    # Backward compatibility with old PBGet junctions. If it still exists, remove the junction
    if pbtools.is_junction("Binaries") and not pbtools.remove_junction("Binaries"):
        pblog.error("Something went wrong while removing junction for 'Binaries' folder. You should remove that folder manually to solve the problem")
        return -1
    # Remove binary package if it exists, hub is not able to overwrite existing files
    if os.path.exists(binary_package_name):
        try:
            os.remove(binary_package_name)
        except Exception as e:
            pblog.exception(str(e))
            pblog.error(f"Exception thrown while trying to remove {binary_package_name}. Please remove it manually")
            return -1
    if not os.path.isfile(hub_config_path):
        pblog.info("You will now be asked to log in to your GitHub account. Please note that for security reasons, your password will not be shown as you type it.")
        # If user didn't login with hub yet, do it now for once
        output = pbtools.run([hub_executable_path, "release", "-L", "1"])
        if not os.path.isfile(hub_config_path):
            pblog.error("Failed to login into hub with git credentials. Please check if your provided credentials are valid.")
            # Recurse so the user can retry the login flow.
            return pull_binaries(version_number, pass_checksum)
        else:
            pblog.info("Login to hub API was successful")
    try:
        output = pbtools.get_combined_output([hub_executable_path, "release", "download", version_number, "-i", binary_package_name])
        if f"Downloading {binary_package_name}" in output:
            pass
        elif "Unable to find release with tag name" in output:
            pblog.error(f"Failed to find release tag {version_number}. Please wait and try again later.")
            return -1
        elif "The file exists" in output:
            pblog.error(f"File {binary_package_name} was not able to be overwritten. Please remove it manually and run StartProject again.")
            return -1
        elif "did not match any available assets" in output:
            # FIX: this message was a plain string literal, so the literal text
            # "{version_number}" was logged instead of the release tag.
            pblog.error(f"Binaries for release {version_number} are not pushed into GitHub yet. Please wait and try again later.")
            return -1
        elif not output:
            # hub doesn't print any output if package doesn't exist in release
            pblog.error(f"Failed to find binary package for release {version_number}")
            return 1
        else:
            pblog.error(f"Unknown error occurred while pulling binaries for release {version_number}")
            pblog.error(f"Command output was: {output}")
            return 1
    except Exception as e:
        pblog.exception(str(e))
        pblog.error(
            f"Exception thrown while trying do pull binaries for {version_number}")
        return 1
    # Temp fix for Binaries folder with unnecessary content
    if os.path.isdir("Binaries"):
        try:
            shutil.rmtree("Binaries")
        except Exception as e:
            pblog.exception(str(e))
            pblog.error("Exception thrown while trying do clean Binaries folder")
            return 1
    try:
        if pass_checksum:
            checksum_json_path = None
        else:
            checksum_json_path = pbconfig.get("checksum_file")
            if not os.path.exists(checksum_json_path):
                pblog.error(f"Checksum json file is not found at {checksum_json_path}")
                return 1
            # FIX: only verify the package hash when checksums are enabled;
            # doing this unconditionally passed checksum_json_path=None in the
            # pass_checksum case, which can never succeed.
            if not pbtools.compare_md5_single(binary_package_name, checksum_json_path):
                return 1
        with ZipFile(binary_package_name) as zip_file:
            zip_file.extractall()
        if pass_checksum:
            return 0
        elif not pbtools.compare_md5_all(checksum_json_path, True):
            return 1
    except Exception as e:
        pblog.exception(str(e))
        pblog.error(f"Exception thrown while trying do extract binary package for {version_number}")
        return 1
    return 0