def _change_manifest_file(self):
    """ Change revision and branch for the selected component

    Reads the manifest at ``self._manifest_path``, replaces the
    component's repository entry with one carrying the current
    branch/revision/commit time, and writes the manifest back in place.

    :return: True on success, False if any step raised
    :rtype: Boolean
    """
    # Fixed: message had an f-string prefix with no placeholders (ruff F541).
    self._log.info('Changing manifest file')
    try:
        manifest = Manifest(self._manifest_path)
        component = manifest.get_component(self._component_name)
        # NOTE(review): the component *name* is passed as the repository
        # name here — looks intentional only if repo and component share
        # a name; confirm against Manifest/Component API.
        tmp_repo = component.get_repository(self._component_name)
        # Rebuild the repository record, keeping name/url/target_branch/type
        # and overriding branch, revision and commit time from this run.
        repository = Repository(tmp_repo.name, tmp_repo.url, self._branch,
                                tmp_repo.target_branch, self._revision,
                                self._commit_time, tmp_repo.type)
        component.add_repository(repository, replace=True)
        manifest.save_manifest(self._manifest_path)
        self._log.info('Manifest file was changed')
    except Exception as e:
        # Broad catch is deliberate: any failure makes this step report False
        # to the caller instead of aborting the whole run.
        self._log.exception('Changing manifest file failed: %s', e)
        return False
    return True
class BuildGenerator(ConfigGenerator):
    """ Main class. Contains commands for building product.

    Drives the stage pipeline clean -> extract -> build -> test ->
    install -> pack -> copy, reading the stage actions from a build
    configuration file (handled by the ConfigGenerator base class).
    """

    def __init__(self, build_config_path, root_dir, manifest, component,
                 build_type, product_type, stage, target_arch=None,
                 custom_cli_args=None):
        """
        :param build_config_path: Path to build configuration file
        :type build_config_path: pathlib.Path

        :param root_dir: Main directory for product building
        :type root_dir: pathlib.Path

        :param manifest: Path to a manifest file
        :type manifest: String

        :param component: Name of component
        :type component: String

        :param build_type: Type of build (release|debug)
        :type build_type: String

        :param product_type: Type of product (linux|linux_embedded|linux_pre_si|windows)
        :type product_type: String

        :param stage: Build stage
        :type stage: String

        :param target_arch: Architecture of target platform
        :type target_arch: List

        :param custom_cli_args: Dict of custom command line arguments (ex. 'arg': 'value')
        :type custom_cli_args: Dict
        """
        self._default_stage = Stage.BUILD.value
        super().__init__(root_dir, build_config_path, stage)
        # Marker file written by an earlier stage; read in _copy() to decide
        # whether artifacts go to the "failed" share location.
        self._build_state_file = root_dir / "build_state"
        # Well-known working directories under root_dir, used by all stages.
        self._options.update({
            "REPOS_DIR": root_dir / "repos",
            "BUILD_DIR": root_dir / "build",
            "INSTALL_DIR": root_dir / "install",
            "PACK_DIR": root_dir / "pack",
            "DEPENDENCIES_DIR": root_dir / "dependencies",
            "BUILD_TYPE": build_type,  # sets from command line argument ('release' by default)
            "STRIP_BINARIES": False,  # Flag for stripping binaries of build
        })
        self._product_repos = []
        # Archive layouts filled in by the build config; consumed in _pack().
        self._dev_pkg_data_to_archive = []
        self._install_pkg_data_to_archive = []
        self._custom_cli_args = custom_cli_args
        self._target_arch = target_arch
        self._manifest = Manifest(manifest)
        self._component = self._manifest.get_component(component)
        self._component.build_info.set_build_type(build_type)
        self._component.build_info.set_product_type(product_type)

    def _update_global_vars(self):
        """ Expose helpers and build metadata to the build config's
        execution namespace (the config file is evaluated with these names
        in scope). """
        self._global_vars.update({
            'vs_component': self._vs_component,
            'stage': Stage,
            'copy_win_files': copy_win_files,
            'args': self._custom_cli_args,
            'product_type': self._component.build_info.product_type,
            'build_event': self._component.build_info.build_event,
            # TODO should be in lower case
            'DEV_PKG_DATA_TO_ARCHIVE': self._dev_pkg_data_to_archive,
            'INSTALL_PKG_DATA_TO_ARCHIVE': self._install_pkg_data_to_archive,
            'get_build_number': get_build_number,
            'get_api_version': self._get_api_version,
            'branch_name': self._component.trigger_repository.branch,
            'changed_repo_name': self._manifest.event_repo.name,
            'update_config': self._update_config,
            'target_arch': self._target_arch,
            'get_packing_cmd': get_packing_cmd,
            'get_commit_number': ProductState.get_commit_number,
            'copytree': copytree,
            'manifest': self._manifest,
            'create_file': create_file
        })

    def _get_config_vars(self):
        """ Collect the repository names listed under PRODUCT_REPOS in the
        build config; used in _extract() to filter which repos are cloned. """
        if 'PRODUCT_REPOS' in self._config_variables:
            for repo in self._config_variables['PRODUCT_REPOS']:
                self._product_repos.append(repo['name'])

    def _action(self, name, stage=None, cmd=None, work_dir=None, env=None,
                callfunc=None, verbose=False):
        """ Handler for 'action' from build config file

        :param name: Name of action
        :type name: String

        :param stage: Stage type
        :type stage: Stage

        :param cmd: command line script
        :type cmd: None | String

        :param work_dir: Path where script will execute
        :type work_dir: None | pathlib.Path

        :param env: Environment variables for script
        :type env: None | Dict

        :param callfunc: python function, which need to execute
        :type callfunc: tuple (function_name, args, kwargs) | None

        :return: None | Exception
        """
        if not stage:
            stage = Stage.BUILD.value
        else:
            stage = stage.value
        if not work_dir:
            # Default working directory; build/install actions run in BUILD_DIR.
            work_dir = self._options["ROOT_DIR"]
            if stage in [Stage.BUILD.value, Stage.INSTALL.value]:
                work_dir = self._options["BUILD_DIR"]
        # Give each build-stage action its own log file, but only when the
        # pipeline is actually executing the build stage now.
        if stage == Stage.BUILD.value and self._current_stage == Stage.BUILD.value:
            configure_logger(
                name, self._options['LOGS_DIR'] / 'build' / f'{name}.log')
        self._actions[stage].append(
            Action(name, stage, cmd, work_dir, env, callfunc, verbose))

    def _vs_component(self, name, solution_path, msbuild_args=None,
                      vs_version="vs2017", dependencies=None, env=None,
                      verbose=False):
        """ Handler for VS components

        :param name: Name of action
        :type name: String

        :param solution_path: Path to solution file
        :type solution_path: pathlib.Path

        :param msbuild_args: Arguments of 'msbuild'
        :type msbuild_args: Dictionary

        :param vs_version: Version of Visual Studio
        :type vs_version: String

        :param dependencies: Dependency of other actions
        :type dependencies: List

        :param env: Environment variables for script
        :type env: None | Dict

        :return: None | Exception
        """
        # Start from the config-wide MSBUILD_ARGUMENTS and overlay the
        # per-component overrides; deepcopy so the shared dict is not mutated.
        ms_arguments = deepcopy(
            self._config_variables.get('MSBUILD_ARGUMENTS', {}))
        if msbuild_args:
            for key, value in msbuild_args.items():
                if isinstance(value, dict):
                    # Merge nested dicts key-by-key; overrides win.
                    ms_arguments[key] = {
                        **ms_arguments.get(key, {}),
                        **msbuild_args[key]
                    }
                else:
                    ms_arguments[key] = msbuild_args[key]

        if self._current_stage == Stage.BUILD.value:
            configure_logger(
                name, self._options['LOGS_DIR'] / 'build' / f'{name}.log')
        self._actions[Stage.BUILD.value].append(
            VsComponent(name, solution_path, ms_arguments, vs_version,
                        dependencies, env, verbose))

    @retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=30))
    def _clean(self):
        """ Clean build directories

        Retried up to 3 times with exponential backoff (directories may be
        temporarily locked, e.g. on network shares).

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info('CLEANING')

        remove_dirs = {
            'BUILD_DIR', 'INSTALL_DIR', 'LOGS_DIR', 'PACK_DIR',
            'DEPENDENCIES_DIR'
        }

        for directory in remove_dirs:
            dir_path = self._options.get(directory)
            if dir_path.exists():
                self._log.info(f'remove directory {dir_path}')
                shutil.rmtree(dir_path)

        self._options["LOGS_DIR"].mkdir(parents=True, exist_ok=True)

        if self._build_state_file.exists():
            self._log.info('remove build state file %s',
                           self._build_state_file)
            self._build_state_file.unlink()

        if not self._run_build_config_actions(Stage.CLEAN.value):
            return False
        return True

    def _extract(self):
        """ Get and prepare build repositories
            Uses git_worker.py module

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("EXTRACTING")

        self._options['REPOS_DIR'].mkdir(parents=True, exist_ok=True)
        self._options['PACK_DIR'].mkdir(parents=True, exist_ok=True)

        repo_states = collections.defaultdict(dict)
        for repo in self._component.repositories:
            # Empty PRODUCT_REPOS means "extract everything".
            if not self._product_repos or repo.name in self._product_repos:
                repo_states[repo.name]['target_branch'] = repo.target_branch
                repo_states[repo.name]['branch'] = repo.branch
                repo_states[repo.name]['commit_id'] = repo.revision
                repo_states[repo.name]['url'] = repo.url
                repo_states[repo.name][
                    'trigger'] = repo.name == self._component.build_info.trigger

        product_state = ProductState(repo_states, self._options["REPOS_DIR"])
        product_state.extract_all_repos()
        # Persist repo states and the manifest next to pack results so the
        # exact sources of this build are reproducible.
        product_state.save_repo_states(
            self._options["PACK_DIR"] / 'repo_states.json',
            trigger=self._component.build_info.trigger)
        self._manifest.save_manifest(self._options["PACK_DIR"] / 'manifest.yml')

        shutil.copyfile(self._config_path,
                        self._options["PACK_DIR"] / self._config_path.name)
        # A "<config>_test.<ext>" sibling file, if present, is shipped too.
        test_scenario = self._config_path.parent / f'{self._config_path.stem}_test{self._config_path.suffix}'
        if test_scenario.exists():
            shutil.copyfile(test_scenario,
                            self._options["PACK_DIR"] / test_scenario.name)

        if not self._get_dependencies():
            return False
        if not self._run_build_config_actions(Stage.EXTRACT.value):
            return False
        return True

    def _build(self):
        """ Execute 'build' stage

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("BUILDING")

        self._options['BUILD_DIR'].mkdir(parents=True, exist_ok=True)

        if not self._run_build_config_actions(Stage.BUILD.value):
            return False
        if self._options['STRIP_BINARIES']:
            if not self._strip_bins():
                return False
        return True

    def _test(self):
        """ Execute 'test' stage

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("TESTING")

        self._options['BUILD_DIR'].mkdir(parents=True, exist_ok=True)

        if not self._run_build_config_actions(Stage.TEST.value):
            return False
        return True

    def _install(self):
        """ Execute 'install' stage

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("INSTALLING")

        self._options['INSTALL_DIR'].mkdir(parents=True, exist_ok=True)

        if not self._run_build_config_actions(Stage.INSTALL.value):
            return False
        return True

    def _pack(self):
        """ Pack build results
            creates *.tar.gz archives

        Layout:
            pack_root_dir
                install_pkg.tar.gz (store 'install' stage results)
                developer_pkg.tar.gz (store 'build' stage results)
                logs.tar.gz
                repo_states.json

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("PACKING")

        self._options['PACK_DIR'].mkdir(parents=True, exist_ok=True)

        # Continue packing everything even after a failure; report at the end.
        no_errors = True

        if not self._run_build_config_actions(Stage.PACK.value):
            no_errors = False

        if platform.system() == 'Windows':
            extension = "zip"
        elif platform.system() == 'Linux':
            extension = "tar.gz"
        else:
            self._log.critical(
                f'Can not pack data on this OS: {platform.system()}')
            return False

        # creating install package
        if self._install_pkg_data_to_archive:
            if not make_archive(
                    self._options["PACK_DIR"] / f"install_pkg.{extension}",
                    self._install_pkg_data_to_archive):
                no_errors = False
        else:
            self._log.info('Install package empty. Skip packing.')

        # creating developer package
        if self._dev_pkg_data_to_archive:
            if not make_archive(
                    self._options["PACK_DIR"] / f"developer_pkg.{extension}",
                    self._dev_pkg_data_to_archive):
                no_errors = False
        else:
            self._log.info('Developer package empty. Skip packing.')

        # creating logs package
        logs_data = [
            {
                'from_path': self._options['ROOT_DIR'],
                'relative': [
                    {
                        'path': 'logs'
                    },
                ]
            },
        ]
        if not make_archive(self._options["PACK_DIR"] / f"logs.{extension}",
                            logs_data):
            no_errors = False

        if not no_errors:
            self._log.error('Not all data was packed')
            return False
        return True

    def _copy(self):
        """ Copy 'pack' stage results to share folder

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("COPYING")

        # Default to PASS when no build_state file was written.
        build_state = {'status': "PASS"}
        if self._build_state_file.exists():
            with self._build_state_file.open() as state:
                build_state = json.load(state)

        if build_state['status'] == "FAIL":
            build_dir = get_build_dir(self._manifest, self._component.name,
                                      is_failed=True)
            build_url = get_build_url(self._manifest, self._component.name,
                                      is_failed=True)
        else:
            build_dir = get_build_dir(self._manifest, self._component.name)
            build_url = get_build_url(self._manifest, self._component.name)
        build_root_dir = get_build_dir(self._manifest, self._component.name,
                                       link_type='root')

        rotate_dir(build_dir)
        self._log.info('Copy to %s', build_dir)
        self._log.info('Artifacts are available by: %s', build_url)

        # Workaround for copying to samba share on Linux
        # to avoid exceptions while setting Linux permissions.
        # NOTE(review): if copytree raises, copystat stays monkeypatched —
        # consider a try/finally here.
        _orig_copystat = shutil.copystat
        shutil.copystat = lambda x, y, follow_symlinks=True: x
        shutil.copytree(self._options['PACK_DIR'], build_dir)
        shutil.copystat = _orig_copystat

        if not self._run_build_config_actions(Stage.COPY.value):
            return False

        if build_state['status'] == "PASS":
            # Record the relative path of the newest successful build so other
            # jobs can resolve "last build" for this product type.
            last_build_path = build_dir.relative_to(build_root_dir)
            last_build_file = build_dir.parent.parent / f'last_build_{self._component.build_info.product_type}'
            last_build_file.write_text(str(last_build_path))
        return True

    def _strip_bins(self):
        """ Strip binaries and save debug information

        :return: Boolean
        """
        self._log.info('-' * 80)
        self._log.info(f'Stripping binaries')
        system_os = platform.system()
        if system_os == 'Linux':
            bins_to_strip = []
            binaries_with_error = []
            # Candidate binaries: executable files with no suffix or '.so'.
            executable_bin_filter = ['', '.so']
            search_results = self._options['BUILD_DIR'].rglob('*')
            for path in search_results:
                if path.is_file():
                    if os.access(
                            path,
                            os.X_OK) and path.suffix in executable_bin_filter:
                        bins_to_strip.append(path)
            for result in bins_to_strip:
                orig_file = str(result.absolute())
                debug_file = str(
                    (result.parent / f'{result.stem}.sym').absolute())
                self._log.debug('-' * 80)
                self._log.debug(f'Stripping {orig_file}')
                # Standard GNU split-debug sequence: save debug info to a .sym
                # file, strip the binary, then link the binary back to the
                # .sym via a gnu-debuglink section.
                strip_commands = OrderedDict([
                    ('copy_debug',
                     ['objcopy', '--only-keep-debug', orig_file, debug_file]),
                    ('strip', [
                        'strip', '--strip-debug', '--strip-unneeded',
                        '--remove-section=.comment', orig_file
                    ]),
                    ('add_debug_link', [
                        'objcopy', f'--add-gnu-debuglink={debug_file}',
                        orig_file
                    ]),
                    ('set_chmod', ['chmod', '-x', debug_file])
                ])
                check_binary_command = f'file {orig_file} | grep ELF'
                for command in strip_commands.values():
                    err, out = cmd_exec(command,
                                        shell=False,
                                        log=self._log,
                                        verbose=False)
                    if err:
                        # Not strip file if it is not binary
                        return_code, _ = cmd_exec(check_binary_command,
                                                  shell=True,
                                                  log=self._log,
                                                  verbose=False)
                        if return_code:
                            # Non-zero grep means the file is not ELF at all;
                            # skip it without counting this as a failure.
                            self._log.warning(
                                f"File {orig_file} is not binary")
                            break
                        if orig_file not in binaries_with_error:
                            binaries_with_error.append(orig_file)
                        self._log.error(out)
                        continue
            if binaries_with_error:
                self._log.error(
                    'Stripping for next binaries was failed. '
                    'See full log for details:\n%s',
                    '\n'.join(binaries_with_error))
                return False
        elif system_os == 'Windows':
            # Nothing to strip on Windows builds.
            pass
        else:
            self._log.error(f'Can not strip binaries on {system_os}')
            return False
        return True

    def _get_api_version(self, repo_name):
        """ Get major and minor API version for Windows build from mfxdefs.h
            Used for windows weekly build

        :param repo_name: name of repository
        :type repo_name: string

        :return: minor API version, major API version
        :rtype: Tuple
        """
        # TODO: update for using in linux closed and open source builds
        major_version = minor_version = "0"
        header_name = 'mfxdefs.h'
        mfxdefs_path = self._options[
            'REPOS_DIR'] / repo_name / 'include' / header_name
        if mfxdefs_path.exists():
            is_major_version_found = False
            is_minor_version_found = False
            with open(mfxdefs_path, 'r') as lines:
                for line in lines:
                    major_version_pattern = re.search(
                        r'MFX_VERSION_MAJOR\s(\d+)', line)
                    if major_version_pattern:
                        major_version = major_version_pattern.group(1)
                        is_major_version_found = True
                        # A line defines at most one of the two macros.
                        continue
                    minor_version_pattern = re.search(
                        r'MFX_VERSION_MINOR\s(\d+)', line)
                    if minor_version_pattern:
                        minor_version = minor_version_pattern.group(1)
                        is_minor_version_found = True
            if not is_major_version_found:
                self._log.warning(f'MFX_VERSION_MAJOR does not exist')
            if not is_minor_version_found:
                self._log.warning(f'MFX_VERSION_MINOR does not exist')
        else:
            self._log.warning(f'{header_name} does not exist')
        self._log.info(
            f'Returned versions: MAJOR {major_version}, MINOR {minor_version}')
        return major_version, minor_version

    def _update_config(self, pkgconfig_dir, update_data, copy_to=None,
                       pattern='*.pc'):
        """ Change prefix in pkgconfigs

        :param pkgconfig_dir: Path to package config directory
        :type: pathlib.Path
        :param update_data: new data to write to pkgconfigs
        :type: dict
        :param copy_to: optional parameter for creating new dir for pkgconfigs
        :type: String
        :param pattern: glob pattern selecting the config files to rewrite
        :type: String
        :return: Flag whether files were successfully modified
        """
        # Create new dir for pkgconfigs
        if copy_to:
            try:
                copytree(pkgconfig_dir, copy_to)
                pkgconfig_dir = copy_to
                self._log.debug(
                    f"update_config: pkgconfigs were copied from {pkgconfig_dir} to {copy_to}"
                )
            except OSError:
                self._log.error(
                    f"update_config: Failed to copy package configs from {pkgconfig_dir} to {copy_to}"
                )
                raise

        files_list = pkgconfig_dir.glob(pattern)
        for pkgconfig in files_list:
            # Open read+write: read everything, rewind, truncate, rewrite.
            with pkgconfig.open('r+') as conf_file:
                self._log.debug(f"update_config: Start updating {pkgconfig}")
                try:
                    current_config_data = conf_file.readlines()
                    conf_file.seek(0)
                    conf_file.truncate()
                    for line in current_config_data:
                        # NOTE(review): this loop variable shadows the
                        # 'pattern' parameter (harmless here because glob()
                        # already ran, but worth renaming).
                        for pattern, data in update_data.items():
                            line = re.sub(pattern, data, line)
                        conf_file.write(line)
                    self._log.debug(f"update_config: {pkgconfig} is updated")
                except OSError:
                    self._log.error(
                        f"update_config: Failed to update package config: {pkgconfig}"
                    )
                    raise

    def _get_dependencies(self):
        """ Extract install packages of the components listed under
        DEPENDENCIES in the build config into DEPENDENCIES_DIR.

        :return: Boolean (True also when there are no dependencies)
        """
        deps = self._config_variables.get("DEPENDENCIES", {})
        if not deps:
            return True
        try:
            deps_dir = self._options['DEPENDENCIES_DIR']
            self._log.info(
                f'Dependencies was found. Trying to extract to {deps_dir}')
            deps_dir.mkdir(parents=True, exist_ok=True)
            self._log.info(f'Creating manifest')
            for dependency in deps:
                self._log.info(f'Getting component {dependency}')
                comp = self._manifest.get_component(dependency)
                if comp:
                    try:
                        dep_dir = get_build_dir(self._manifest, dependency)
                        # TODO: Extension hardcoded for open source. Need to use only .zip in future.
                        dep_pkg = dep_dir / f'install_pkg.tar.gz'
                        self._log.info(f'Extracting {dep_pkg}')
                        extract_archive(dep_pkg, deps_dir / dependency)
                    except Exception:
                        self._log.exception('Can not extract archive')
                        return False
                else:
                    self._log.error(
                        f'Component {dependency} does not exist in manifest')
                    return False
        except Exception:
            self._log.exception('Exception occurred:')
            return False
        return True
def extract_private_infrastructure(root_dir, branch, commit_id, commit_time,
                                   manifest):
    """ Assemble the private (closed-source) infrastructure package.

    Extracts the open- and closed-source infrastructure and product-configs
    repositories into a temporary directory under root_dir, then merges them
    into root_dir/'infrastructure' and root_dir/'product-configs', removing
    closed-source static data and injecting local secrets.

    :param root_dir: Directory where repositories will be extracted
    :type root_dir: pathlib.Path
    :param branch: Branch name for the open source infrastructure
    :param commit_id: Revision to extract
    :param commit_time: Time to slice revisions
    :param manifest: Manifest used by the open source extraction

    Exits the script with ErrorCode.CRITICAL if package creation fails.
    """
    log = logging.getLogger('extract_repo.extract_private_infrastructure')

    infrastructure_root_dir = root_dir / 'infrastructure'
    configs_root_dir = root_dir / 'product-configs'
    # We save and update repos in temporary folder and create infrastructure package from it
    # So, not needed extracting repo to the beginning each time
    original_repos_dir = root_dir / 'tmp_infrastructure'

    repos = MediaSdkDirectories()
    open_source_product_configs_repo = repos.open_source_product_configs_repo
    open_source_infra_repo = repos.open_source_infrastructure_repo
    closed_source_product_configs_repo = repos.closed_source_product_configs_repo
    closed_source_infra_repo = repos.closed_source_infrastructure_repo

    # Extract open source infrastructure and product configs
    extract_open_source_infrastructure(original_repos_dir, branch, commit_id,
                                       commit_time, manifest)

    # Extract closed source product configs
    extract_repo(root_repo_dir=original_repos_dir,
                 repo_name=closed_source_product_configs_repo,
                 branch='master',
                 commit_time=commit_time)

    # The closed-source manifest pins which infra revision to extract.
    manifest_path = original_repos_dir / closed_source_product_configs_repo / 'manifest.yml'
    manifest_data = Manifest(manifest_path)
    closed_source_infra = manifest_data.get_component('infra').get_repository(
        closed_source_infra_repo)

    # Extract closed source infrastructure
    extract_repo(root_repo_dir=original_repos_dir,
                 repo_name=closed_source_infra.name,
                 branch=closed_source_infra.branch,
                 commit_id=closed_source_infra.revision)

    # Event repository in infra component for private builds is product-configs,
    # so need to change default trigger value
    manifest_data.get_component('infra').build_info.set_trigger(
        open_source_product_configs_repo)
    manifest_data.save_manifest(manifest_path)

    log.info('-' * 50)
    log.info(f"Create infrastructure package")
    try:
        log.info(f"- Delete existing infrastructure")
        if infrastructure_root_dir.exists():
            remove_directory(str(infrastructure_root_dir))
        if configs_root_dir.exists():
            remove_directory(str(configs_root_dir))

        # Closed-source trees are copied over the open-source ones, so closed
        # files win on name collisions.
        log.info(f"- Copy open source infrastructure")
        copy_tree(str(original_repos_dir / open_source_infra_repo),
                  str(infrastructure_root_dir))
        log.info(f"- Copy closed source infrastructure")
        copy_tree(str(original_repos_dir / closed_source_infra_repo),
                  str(infrastructure_root_dir))
        log.info(f"- Remove closed source static data")
        (infrastructure_root_dir / 'common' / 'static_closed_data.py').unlink()
        log.info(f"- Copy open source product configs")
        copy_tree(str(original_repos_dir / open_source_product_configs_repo),
                  str(configs_root_dir))
        log.info(f"- Copy closed source product configs")
        copy_tree(str(original_repos_dir / closed_source_product_configs_repo),
                  str(configs_root_dir))
        # Secrets are taken from the current working directory.
        # log.info(f"Copy secrets")
        shutil.copyfile(
            str(pathlib.Path('msdk_secrets.py').absolute()),
            str(infrastructure_root_dir / 'common' / 'msdk_secrets.py'))
    except Exception:
        log.exception('Can not create infrastructure package')
        exit_script(ErrorCode.CRITICAL)
class ManifestRunner:
    """ Prepare manifest

    Extracts all repositories of all components, updates the manifest with
    the extracted states and the triggering event, then saves the manifest
    to the build share.
    """

    def __init__(self, root_dir, repo, branch, revision, target_branch,
                 build_event, commit_time):
        """
        :param root_dir: Directory where repositories will be extracted
        :type root_dir: String

        :param repo: Repository name
        :type repo: String

        :param branch: Branch name
        :type branch: String

        :param revision: Revision of a commit
        :type revision: String

        :param target_branch: Target branch name
        :type target_branch: String

        :param build_event: Event of a build
        :type build_event: String

        :param commit_time: Time to slice revisions ('%Y-%m-%d %H:%M:%S')
        :type commit_time: String | None
        """
        # Repositories whose branch must be remapped for release builds.
        self._release_repos = ['product-configs', 'MediaSDK', 'media-driver']
        self._root_dir = pathlib.Path(root_dir)
        self._repo = repo
        self._branch = branch
        self._revision = revision
        self._target_branch = target_branch
        self._build_event = build_event
        self._commit_time = datetime.strptime(commit_time, '%Y-%m-%d %H:%M:%S') \
            if commit_time else None
        # Base manifest shipped alongside this script in product-configs.
        self._manifest = Manifest(
            pathlib.Path(__file__).resolve().parents[2] / 'product-configs' /
            'manifest.yml')
        self._release_branch = {}
        self._updated_repos = None
        self._log = logging.getLogger(self.__class__.__name__)

    def _check_branch(self):
        """ Check release branch

        If the (target) branch is a release branch, fill
        self._release_branch with the per-repo release branch names.
        """
        self._log.info('Checking release branch')
        # Pull requests carry a target branch; direct pushes do not.
        if self._target_branch:
            branch_to_check = self._target_branch
        else:
            branch_to_check = self._branch
        if MediaSdkDirectories.is_release_branch(branch_to_check):
            sdk_br, driver_br = convert_branch(branch_to_check)
            for repo_name in self._release_repos:
                # media-driver uses its own release branch naming scheme.
                if repo_name == 'media-driver':
                    self._release_branch[repo_name] = driver_br
                else:
                    self._release_branch[repo_name] = sdk_br

    def _extract_repos(self):
        """ Extract and slice repositories """
        self._log.info('Extracting repositories')
        sources_list = {}
        for component in self._manifest.components:
            for repo in component.repositories:
                if repo.name == self._repo:
                    # The triggering repo is extracted at the exact event
                    # branch/revision.
                    sources_list[repo.name] = {
                        'branch': self._branch,
                        'target_branch': self._target_branch,
                        'commit_id': self._revision,
                        'is_trigger': True,
                        'url': repo.url
                    }
                else:
                    # Repos already pinned to a revision in the manifest are
                    # left untouched.
                    if repo.revision is not None:
                        continue
                    sources_list[repo.name] = {
                        'branch': self._release_branch.get(repo.name, repo.branch),
                        'target_branch': repo.target_branch,
                        'commit_id': repo.revision,
                        'is_trigger': False,
                        'url': repo.url
                    }
        states = ProductState(sources_list, self._root_dir, self._commit_time)
        states.extract_all_repos()
        self._updated_repos = {
            state.repo_name: state
            for state in states.repo_states
        }

    def _update_manifest(self):
        """ Update manifest from extracted product-configs repo """
        self._log.info('Updating manifest')
        for component in self._manifest.components:
            for repo in component.repositories:
                if repo.name == self._repo:
                    # Record which component/repo triggered this build.
                    component.build_info.set_build_event(self._build_event)
                    component.build_info.set_trigger(repo.name)
                    self._manifest.set_event_component(component.name)
                    self._manifest.set_event_repo(repo.name)
                if repo.name in self._updated_repos:
                    # Replace the manifest entry with the actually extracted
                    # state (branch, revision, commit time).
                    upd_repo = Repository(
                        self._updated_repos[repo.name].repo_name,
                        self._updated_repos[repo.name].url,
                        self._updated_repos[repo.name].branch_name,
                        self._updated_repos[repo.name].target_branch,
                        self._updated_repos[repo.name].commit_id,
                        str(self._updated_repos[repo.name].repo.commit().
                            committed_datetime.astimezone()))
                    component.add_repository(upd_repo, replace=True)

    def _save_manifest(self):
        """ Save updated manifest to the build share and log its location """
        self._log.info('Saving manifest')
        component_name = self._manifest.event_component.name
        manifest_path = get_build_dir(self._manifest, component_name,
                                      link_type='manifest') / 'manifest.yml'
        manifest_url = get_build_url(self._manifest, component_name,
                                     link_type='manifest') + '/manifest.yml'
        self._manifest.save_manifest(manifest_path)
        # Fixed: these used f-string prefixes combined with lazy %s logging
        # args; the f-prefix was dead (no placeholders) and risks double
        # formatting if braces are ever added — plain %-style only.
        self._log.info('Manifest was saved to: %s', manifest_path)
        self._log.info('Manifest is available by link: %s', manifest_url)

    def run(self):
        """ Execute manifest creating process """
        self._check_branch()
        self._extract_repos()
        self._update_manifest()
        self._save_manifest()