def __init__(self, root_dir, test_config, manifest, component, current_stage,
             product_type=None, build_type=None, custom_types=None):
    self._manifest = None
    self._infrastructure_path = pathlib.Path(__file__).resolve().parents[1]
    self._manifest = Manifest(manifest)
    self._component = self._manifest.get_component(component)
    self._default_stage = TestStage.TEST.value
    self._artifacts_layout = None
    self._product_type = product_type

    # TODO: create mapper for all tests combinations of components in product-configs
    if build_type:
        self._component.build_info.set_build_type(build_type)
    if custom_types:
        for comp, prod_type in custom_types.items():
            self._manifest.get_component(comp).build_info.set_product_type(prod_type)

    super().__init__(root_dir, test_config, current_stage)
def install_components(manifest, components):
    """
    :param manifest: Path to a manifest file
    :type manifest: String | Manifest
    :param components: List of components to install
    :type components: List
    :return: Boolean
    """
    if not isinstance(manifest, Manifest):
        manifest = Manifest(manifest)
    pkg_type = get_pkg_type()
    log = logging.getLogger('install_components')

    for component in components:
        log.info(f'Installing component: {component}')
        comp = manifest.get_component(component)
        if not comp:
            log.error(f'{component} does not exist in manifest')
            return False

        repo = comp.trigger_repository
        artifacts = MediaSdkDirectories.get_build_dir(
            branch=repo.target_branch if repo.target_branch else repo.branch,
            build_event=Build_event.PRE_COMMIT.value if repo.target_branch else Build_event.COMMIT.value,
            commit_id=repo.revision,
            product_type=comp.product_type,
            build_type='release',
            product=component)

        packages = [pkg_path for pkg_path in artifacts.glob(f'*.{pkg_type}')
                    if component in pkg_path.name.lower()]

        # TODO: solve situation with multiple packages installation, e.g. "package" and "package-devel"
        if len(packages) > 1:
            log.info(f'Found multiple "{component}" packages {packages} in {artifacts}')
            return False
        if len(packages) == 0:
            log.info(f'Package "{component}" was not found in {artifacts}')
            return False

        if not package_manager.uninstall_pkg(component):
            return False
        if not package_manager.install_pkg(packages[0]):
            return False

    return True
def __init__(self, build_config_path, root_dir, manifest, component, build_type,
             product_type, stage, target_arch=None, custom_cli_args=None):
    """
    :param build_config_path: Path to build configuration file
    :type build_config_path: pathlib.Path
    :param root_dir: Main directory for product building
    :type root_dir: pathlib.Path
    :param manifest: Path to a manifest file
    :type manifest: String
    :param component: Name of component
    :type component: String
    :param build_type: Type of build (release|debug)
    :type build_type: String
    :param product_type: Type of product (linux|linux_embedded|linux_pre_si|windows)
    :type product_type: String
    :param stage: Build stage
    :type stage: String
    :param target_arch: Architecture of target platform
    :type target_arch: List
    :param custom_cli_args: Dict of custom command line arguments (ex. 'arg': 'value')
    :type custom_cli_args: Dict
    """
    self._default_stage = Stage.BUILD.value
    super().__init__(root_dir, build_config_path, stage)
    self._build_state_file = root_dir / "build_state"
    self._options.update({
        "REPOS_DIR": root_dir / "repos",
        "BUILD_DIR": root_dir / "build",
        "INSTALL_DIR": root_dir / "install",
        "PACK_DIR": root_dir / "pack",
        "DEPENDENCIES_DIR": root_dir / "dependencies",
        "BUILD_TYPE": build_type,  # sets from command line argument ('release' by default)
        "STRIP_BINARIES": False,  # Flag for stripping binaries of build
    })
    self._product_repos = []
    self._dev_pkg_data_to_archive = []
    self._install_pkg_data_to_archive = []
    self._custom_cli_args = custom_cli_args
    self._target_arch = target_arch
    self._manifest = Manifest(manifest)
    self._component = self._manifest.get_component(component)
    self._component.build_info.set_build_type(build_type)
    self._component.build_info.set_product_type(product_type)
def __init__(self, root_dir, repo, branch, revision, target_branch, build_event, commit_time):
    """
    :param root_dir: Directory where repositories will be extracted
    :type root_dir: String
    :param repo: Repository name
    :type repo: Repository name
    :param branch: Branch name
    :type branch: Branch name
    :param revision: Revision of a commit
    :type revision: Revision of a commit
    :param target_branch: Target branch name
    :type target_branch: Target branch name
    :param build_event: Event of a build
    :type build_event: Event of a build
    :param commit_time: Time to slice revisions
    :type commit_time: Time to slice revisions
    """
    self._release_repos = ['product-configs', 'MediaSDK', 'media-driver']
    self._root_dir = pathlib.Path(root_dir)
    self._repo = repo
    self._branch = branch
    self._revision = revision
    self._target_branch = target_branch
    self._build_event = build_event
    self._commit_time = datetime.strptime(commit_time, '%Y-%m-%d %H:%M:%S') \
        if commit_time else None
    self._manifest = Manifest(
        pathlib.Path(__file__).resolve().parents[2] / 'product-configs' / 'manifest.yml')
    self._release_branch = {}
    self._updated_repos = None
    self._log = logging.getLogger(self.__class__.__name__)
def install_components(manifest, components):
    """
    :param manifest: Path to a manifest file
    :type manifest: String | Manifest
    :param components: List of components to install
    :type components: List
    :return: Boolean
    """
    if not isinstance(manifest, Manifest):
        manifest = Manifest(manifest)
    pkg_type = get_pkg_type()
    log = logging.getLogger('install_components')

    for component in components:
        log.info(f'Installing component: {component}')
        comp = manifest.get_component(component)
        if not comp:
            log.error(f'{component} does not exist in manifest')
            return False

        artifacts = get_build_dir(manifest, component)
        packages = [pkg_path for pkg_path in artifacts.glob(f'*.{pkg_type}')
                    if component in pkg_path.name.lower()]

        # TODO: solve situation with multiple packages installation, e.g. "package" and "package-devel"
        if not packages:
            log.info(f'Package "{component}" was not found in {artifacts}')
            return False
        elif len(packages) > 1:
            log.info(f'Found multiple "{component}" packages {packages} in {artifacts}')
            return False

        if not package_manager.uninstall_pkg(component):
            return False
        if not package_manager.install_pkg(packages[0]):
            return False

    return True
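A minimal usage sketch for install_components above, assuming the function is already imported from its module in this repo; the manifest path and component names are hypothetical placeholders, and only the call signature and boolean return contract come from the snippet itself.

# Hypothetical usage of install_components (placeholder paths and component names).
import logging
import sys

logging.basicConfig(level=logging.INFO)

MANIFEST_PATH = '/path/to/artifacts/manifest.yml'        # placeholder manifest from a previous build
COMPONENTS_TO_INSTALL = ['mediasdk', 'media-driver']      # placeholder component names

if not install_components(MANIFEST_PATH, COMPONENTS_TO_INSTALL):
    # The helper returns False on any lookup or installation failure, so callers can fail fast.
    sys.exit(1)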
def check_component_existence(path_to_manifest, component_name):
    log = logging.getLogger('component_checker')

    log.info(f"Getting data for {component_name} from {path_to_manifest}")
    manifest = Manifest(pathlib.Path(path_to_manifest))
    component = manifest.get_component(component_name)
    repository = component.get_repository(component_name)

    component_dir = MediaSdkDirectories.get_build_dir(
        repository.target_branch if repository.target_branch else repository.branch,
        Build_event.COMMIT.value,
        repository.revision,
        component.product_type,
        Build_type.RELEASE.value,
        product=component_name)

    if component_dir.exists():
        log.info(f"Directory {component_dir} exists")
        # This is stop phrase for buildbot to skip all build stages
        log.info(SKIP_BUILDING_DEPENDENCY_PHRASE)
    else:
        log.info(f"Directory {component_dir} doesn't exist")
def _change_manifest_file(self):
    """
    Change revision and branch for the selected component
    """
    self._log.info(f'Changing manifest file')
    try:
        manifest = Manifest(self._manifest_path)
        component = manifest.get_component(self._component_name)
        tmp_repo = component.get_repository(self._component_name)
        repository = Repository(tmp_repo.name, tmp_repo.url, self._branch,
                                tmp_repo.target_branch, self._revision,
                                self._commit_time, tmp_repo.type)
        component.add_repository(repository, replace=True)
        manifest.save_manifest(self._manifest_path)
        self._log.info('Manifest file was changed')
    except Exception as e:
        self._log.exception('Changing manifest file failed: %s', e)
        return False
    return True
def extract_open_source_infrastructure(root_dir, branch, commit_id, commit_time, manifest):
    repos = MediaSdkDirectories()
    open_source_product_configs_repo = repos.open_source_product_configs_repo
    open_source_infra_repo = repos.open_source_infrastructure_repo

    # Extract product configs
    if not manifest:
        extract_repo(root_repo_dir=root_dir,
                     repo_name=open_source_product_configs_repo,
                     branch=branch,
                     commit_id=commit_id,
                     commit_time=commit_time)
        manifest_data = Manifest(root_dir / open_source_product_configs_repo / 'manifest.yml')
    else:
        manifest_data = Manifest(manifest)
        product_conf = manifest_data.get_component('infra').get_repository(
            open_source_product_configs_repo)
        extract_repo(root_repo_dir=root_dir,
                     repo_name=product_conf.name,
                     branch=product_conf.branch,
                     commit_id=product_conf.revision,
                     commit_time=commit_time)

    open_source_infra = manifest_data.get_component('infra').get_repository(
        open_source_infra_repo)

    # Extract open source infrastructure
    extract_repo(root_repo_dir=root_dir,
                 repo_name=open_source_infra.name,
                 branch=open_source_infra.branch,
                 commit_id=open_source_infra.revision)
def __init__(self, artifacts, root_dir, current_stage):
    self._artifacts_dir = None
    self._manifest = None
    self._infrastructure_path = pathlib.Path(__file__).resolve().parents[1]

    if artifacts.exists():
        if artifacts.is_file():
            self._artifacts_dir = artifacts.parent
            self._manifest = Manifest(artifacts)
        else:
            self._artifacts_dir = artifacts
            self._manifest = Manifest(artifacts / 'manifest.yml')
        try:
            config_path = list(self._artifacts_dir.glob('conf_*_test.py'))[0]
        except Exception:
            raise TestScenarioNotFoundException('Test scenario does not exist')
    else:
        raise ArtifactsNotFoundException(f'{artifacts} does not exist')

    self._default_stage = TestStage.TEST.value
    super().__init__(root_dir, config_path, current_stage)
def generate_build_links(manifest):
    """
    Get and print url to artifacts for each component

    :param manifest: Path to manifest.yml
    :type manifest: String
    :return: Boolean
    """
    try:
        manifest = Manifest(manifest)
        print('*' * 50)
        for component, product_types in COMPONENTS.items():
            component_info = manifest.get_component(component)
            for product_type in product_types:
                component_info.build_info.set_product_type(product_type)
                print(f'{product_type}: {get_build_url(manifest, component)}')
        print('*' * 50)
    except Exception:
        return False
    return True
def _is_latest_revision(self, last_build_file):
    """
    Check that current revision is latest

    :param last_build_file: Path to last_build_* file
    :type last_build_file: pathlib.Path
    """
    try:
        with last_build_file.open('r') as last_build_path:
            manifest = Manifest(last_build_file.parents[3] / last_build_path.read() / 'manifest.yml')
            last_repo = manifest.get_component(self._manifest.event_component.name).get_repository(
                self._manifest.event_repo.name)
    except Exception:
        # Create last_build_* file
        return True

    # Current revision is the latest if revision from last_build_* file exists in local repository
    repo_path = self._options['REPOS_DIR'] / last_repo.name
    rev_list = ProductState.get_revisions_list(repo_path)
    if last_repo.revision in rev_list[1:]:
        return True
    return False
def generate_build_links(manifest):
    """
    Get and print url to artifacts for each component

    :param manifest: Path to manifest.yml
    :type manifest: String
    :return: Boolean
    """
    try:
        manifest = Manifest(manifest)
        print('*' * 50)
        for component in COMPONENTS_LIST:
            print(f'{component}: {get_build_url(manifest, component)}')
        print('*' * 50)
    except Exception:
        return False
    return True
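A small sketch of how generate_build_links might be driven from a command-line entry point; the manifest path and the exit-code handling are assumptions, while the boolean return contract comes from the function above.

# Hypothetical driver for generate_build_links (placeholder manifest path).
import sys

if __name__ == '__main__':
    ok = generate_build_links('/path/to/artifacts/manifest.yml')  # placeholder path
    sys.exit(0 if ok else 1)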
def extract_open_source_infrastructure(root_dir, branch, commit_id, commit_time, manifest):
    log = logging.getLogger('extract_repo.extract_open_source_infrastructure')

    repos = MediaSdkDirectories()
    open_source_product_configs_repo = repos.open_source_product_configs_repo
    open_source_infra_repo = repos.open_source_infrastructure_repo

    # Extract product configs
    if not manifest:
        extract_repo(root_repo_dir=root_dir,
                     repo_name=open_source_product_configs_repo,
                     branch=branch,
                     commit_id=commit_id,
                     commit_time=commit_time)
        manifest_data = Manifest(root_dir / open_source_product_configs_repo / 'manifest.yml')
    else:
        manifest_data = Manifest(manifest)
        product_conf = manifest_data.get_component('infra').get_repository(
            open_source_product_configs_repo)
        extract_repo(root_repo_dir=root_dir,
                     repo_name=product_conf.name,
                     branch=product_conf.branch,
                     commit_id=product_conf.revision,
                     commit_time=commit_time)

    open_source_infra = manifest_data.get_component('infra').get_repository(
        open_source_infra_repo)

    # Extract open source infrastructure
    extract_repo(root_repo_dir=root_dir,
                 repo_name=open_source_infra.name,
                 branch=open_source_infra.branch,
                 commit_id=open_source_infra.revision)

    try:
        log.info(f"Copy secrets")
        shutil.copyfile(
            str(pathlib.Path('msdk_secrets.py').absolute()),
            str(root_dir / open_source_infra_repo / 'common' / 'msdk_secrets.py'))
    except Exception:
        log.exception('Can not create infrastructure package')
        # TODO: An exit from script should be in main()
        exit_script(ErrorCode.CRITICAL)
def check_component_existence(path_to_manifest, component_name, product_type, build_type):
    log = logging.getLogger('component_checker')

    log.info(f"Getting data for {component_name} from {path_to_manifest}")
    manifest = Manifest(pathlib.Path(path_to_manifest))
    if product_type:
        manifest.get_component(component_name).build_info.set_product_type(product_type)
    if build_type:
        manifest.get_component(component_name).build_info.set_build_type(build_type)
    component_dir = get_build_dir(manifest, component_name)

    if component_dir.exists():
        log.info(f"Directory {component_dir} exists")
        link_to_artifacts = get_build_url(manifest, component_name)
        log.info(f"Artifacts are available by: {link_to_artifacts}")
        # This is stop phrase for buildbot to skip all build stages
        log.info(SKIP_BUILDING_DEPENDENCY_PHRASE)
    else:
        log.info(f"Directory {component_dir} doesn't exist")
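A hedged example of calling check_component_existence above; the argument values are placeholders (in the real pipeline they would come from command-line options), and only the signature and logging behaviour are taken from the function itself.

# Hypothetical call to check_component_existence (placeholder values).
import logging

logging.basicConfig(level=logging.INFO)
check_component_existence(
    path_to_manifest='/path/to/product-configs/manifest.yml',  # placeholder path
    component_name='mediasdk',                                 # placeholder component
    product_type='linux',                                      # optional override, may be None
    build_type='release')                                      # optional override, may be None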
class BuildGenerator(ConfigGenerator):
    """
    Main class.
    Contains commands for building product.
    """

    def __init__(self, build_config_path, root_dir, manifest, component, build_type,
                 product_type, stage, target_arch=None, custom_cli_args=None):
        """
        :param build_config_path: Path to build configuration file
        :type build_config_path: pathlib.Path
        :param root_dir: Main directory for product building
        :type root_dir: pathlib.Path
        :param manifest: Path to a manifest file
        :type manifest: String
        :param component: Name of component
        :type component: String
        :param build_type: Type of build (release|debug)
        :type build_type: String
        :param product_type: Type of product (linux|linux_embedded|linux_pre_si|windows)
        :type product_type: String
        :param stage: Build stage
        :type stage: String
        :param target_arch: Architecture of target platform
        :type target_arch: List
        :param custom_cli_args: Dict of custom command line arguments (ex. 'arg': 'value')
        :type custom_cli_args: Dict
        """
        self._default_stage = Stage.BUILD.value
        super().__init__(root_dir, build_config_path, stage)
        self._build_state_file = root_dir / "build_state"
        self._options.update({
            "REPOS_DIR": root_dir / "repos",
            "BUILD_DIR": root_dir / "build",
            "INSTALL_DIR": root_dir / "install",
            "PACK_DIR": root_dir / "pack",
            "DEPENDENCIES_DIR": root_dir / "dependencies",
            "BUILD_TYPE": build_type,  # sets from command line argument ('release' by default)
            "STRIP_BINARIES": False,  # Flag for stripping binaries of build
        })
        self._product_repos = []
        self._dev_pkg_data_to_archive = []
        self._install_pkg_data_to_archive = []
        self._custom_cli_args = custom_cli_args
        self._target_arch = target_arch
        self._manifest = Manifest(manifest)
        self._component = self._manifest.get_component(component)
        self._component.build_info.set_build_type(build_type)
        self._component.build_info.set_product_type(product_type)

    def _update_global_vars(self):
        self._global_vars.update({
            'vs_component': self._vs_component,
            'stage': Stage,
            'copy_win_files': copy_win_files,
            'args': self._custom_cli_args,
            'product_type': self._component.build_info.product_type,
            'build_event': self._component.build_info.build_event,
            # TODO should be in lower case
            'DEV_PKG_DATA_TO_ARCHIVE': self._dev_pkg_data_to_archive,
            'INSTALL_PKG_DATA_TO_ARCHIVE': self._install_pkg_data_to_archive,
            'get_build_number': get_build_number,
            'get_api_version': self._get_api_version,
            'branch_name': self._component.trigger_repository.branch,
            'changed_repo_name': self._manifest.event_repo.name,
            'update_config': self._update_config,
            'target_arch': self._target_arch,
            'get_packing_cmd': get_packing_cmd,
            'get_commit_number': ProductState.get_commit_number,
            'copytree': copytree,
            'manifest': self._manifest,
            'create_file': create_file
        })

    def _get_config_vars(self):
        if 'PRODUCT_REPOS' in self._config_variables:
            for repo in self._config_variables['PRODUCT_REPOS']:
                self._product_repos.append(repo['name'])

    def _action(self, name, stage=None, cmd=None, work_dir=None, env=None,
                callfunc=None, verbose=False):
        """
        Handler for 'action' from build config file

        :param name: Name of action
        :type name: String
        :param stage: Stage type
        :type stage: Stage
        :param cmd: command line script
        :type cmd: None | String
        :param work_dir: Path where script will execute
        :type work_dir: None | pathlib.Path
        :param env: Environment variables for script
        :type env: None | Dict
        :param callfunc: python function, which need to execute
        :type callfunc: tuple (function_name, args, kwargs) | None

        :return: None | Exception
        """
        if not stage:
            stage = Stage.BUILD.value
        else:
            stage = stage.value

        if not work_dir:
            work_dir = self._options["ROOT_DIR"]
            if stage in [Stage.BUILD.value, Stage.INSTALL.value]:
                work_dir = self._options["BUILD_DIR"]

        if stage == Stage.BUILD.value and self._current_stage == Stage.BUILD.value:
            configure_logger(name, self._options['LOGS_DIR'] / 'build' / f'{name}.log')

        self._actions[stage].append(Action(name, stage, cmd, work_dir, env, callfunc, verbose))

    def _vs_component(self, name, solution_path, msbuild_args=None, vs_version="vs2017",
                      dependencies=None, env=None, verbose=False):
        """
        Handler for VS components

        :param name: Name of action
        :type name: String
        :param solution_path: Path to solution file
        :type solution_path: pathlib.Path
        :param msbuild_args: Arguments of 'msbuild'
        :type msbuild_args: Dictionary
        :param vs_version: Version of Visual Studio
        :type vs_version: String
        :param dependencies: Dependency of other actions
        :type dependencies: List
        :param env: Environment variables for script
        :type env: None | Dict

        :return: None | Exception
        """
        ms_arguments = deepcopy(self._config_variables.get('MSBUILD_ARGUMENTS', {}))
        if msbuild_args:
            for key, value in msbuild_args.items():
                if isinstance(value, dict):
                    ms_arguments[key] = {**ms_arguments.get(key, {}), **msbuild_args[key]}
                else:
                    ms_arguments[key] = msbuild_args[key]

        if self._current_stage == Stage.BUILD.value:
            configure_logger(name, self._options['LOGS_DIR'] / 'build' / f'{name}.log')

        self._actions[Stage.BUILD.value].append(
            VsComponent(name, solution_path, ms_arguments, vs_version, dependencies, env, verbose))

    @retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=30))
    def _clean(self):
        """
        Clean build directories

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info('CLEANING')

        remove_dirs = {'BUILD_DIR', 'INSTALL_DIR', 'LOGS_DIR', 'PACK_DIR', 'DEPENDENCIES_DIR'}
        for directory in remove_dirs:
            dir_path = self._options.get(directory)
            if dir_path.exists():
                self._log.info(f'remove directory {dir_path}')
                shutil.rmtree(dir_path)

        self._options["LOGS_DIR"].mkdir(parents=True, exist_ok=True)

        if self._build_state_file.exists():
            self._log.info('remove build state file %s', self._build_state_file)
            self._build_state_file.unlink()

        if not self._run_build_config_actions(Stage.CLEAN.value):
            return False
        return True

    def _extract(self):
        """
        Get and prepare build repositories
        Uses git_worker.py module

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("EXTRACTING")

        self._options['REPOS_DIR'].mkdir(parents=True, exist_ok=True)
        self._options['PACK_DIR'].mkdir(parents=True, exist_ok=True)

        repo_states = collections.defaultdict(dict)
        for repo in self._component.repositories:
            if not self._product_repos or repo.name in self._product_repos:
                repo_states[repo.name]['target_branch'] = repo.target_branch
                repo_states[repo.name]['branch'] = repo.branch
                repo_states[repo.name]['commit_id'] = repo.revision
                repo_states[repo.name]['url'] = repo.url
                repo_states[repo.name]['trigger'] = repo.name == self._component.build_info.trigger

        product_state = ProductState(repo_states, self._options["REPOS_DIR"])
        product_state.extract_all_repos()
        product_state.save_repo_states(self._options["PACK_DIR"] / 'repo_states.json',
                                       trigger=self._component.build_info.trigger)

        self._manifest.save_manifest(self._options["PACK_DIR"] / 'manifest.yml')
        shutil.copyfile(self._config_path, self._options["PACK_DIR"] / self._config_path.name)

        test_scenario = self._config_path.parent / f'{self._config_path.stem}_test{self._config_path.suffix}'
        if test_scenario.exists():
            shutil.copyfile(test_scenario, self._options["PACK_DIR"] / test_scenario.name)

        if not self._get_dependencies():
            return False

        if not self._run_build_config_actions(Stage.EXTRACT.value):
            return False
        return True

    def _build(self):
        """
        Execute 'build' stage

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("BUILDING")

        self._options['BUILD_DIR'].mkdir(parents=True, exist_ok=True)

        if not self._run_build_config_actions(Stage.BUILD.value):
            return False

        if self._options['STRIP_BINARIES']:
            if not self._strip_bins():
                return False
        return True

    def _test(self):
        """
        Execute 'test' stage

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("TESTING")

        self._options['BUILD_DIR'].mkdir(parents=True, exist_ok=True)

        if not self._run_build_config_actions(Stage.TEST.value):
            return False
        return True

    def _install(self):
        """
        Execute 'install' stage

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("INSTALLING")

        self._options['INSTALL_DIR'].mkdir(parents=True, exist_ok=True)

        if not self._run_build_config_actions(Stage.INSTALL.value):
            return False
        return True

    def _pack(self):
        """
        Pack build results
        creates *.tar.gz archives

        Layout:
            pack_root_dir
                install_pkg.tar.gz (store 'install' stage results)
                developer_pkg.tar.gz (store 'build' stage results)
                logs.tar.gz
                repo_states.json

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("PACKING")

        self._options['PACK_DIR'].mkdir(parents=True, exist_ok=True)

        no_errors = True

        if not self._run_build_config_actions(Stage.PACK.value):
            no_errors = False

        if platform.system() == 'Windows':
            extension = "zip"
        elif platform.system() == 'Linux':
            extension = "tar.gz"
        else:
            self._log.critical(f'Can not pack data on this OS: {platform.system()}')
            return False

        # creating install package
        if self._install_pkg_data_to_archive:
            if not make_archive(self._options["PACK_DIR"] / f"install_pkg.{extension}",
                                self._install_pkg_data_to_archive):
                no_errors = False
        else:
            self._log.info('Install package empty. Skip packing.')

        # creating developer package
        if self._dev_pkg_data_to_archive:
            if not make_archive(self._options["PACK_DIR"] / f"developer_pkg.{extension}",
                                self._dev_pkg_data_to_archive):
                no_errors = False
        else:
            self._log.info('Developer package empty. Skip packing.')

        # creating logs package
        logs_data = [
            {
                'from_path': self._options['ROOT_DIR'],
                'relative': [
                    {'path': 'logs'},
                ]
            },
        ]
        if not make_archive(self._options["PACK_DIR"] / f"logs.{extension}", logs_data):
            no_errors = False

        if not no_errors:
            self._log.error('Not all data was packed')
            return False
        return True

    def _copy(self):
        """
        Copy 'pack' stage results to share folder

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("COPYING")

        build_state = {'status': "PASS"}
        if self._build_state_file.exists():
            with self._build_state_file.open() as state:
                build_state = json.load(state)

        if build_state['status'] == "FAIL":
            build_dir = get_build_dir(self._manifest, self._component.name, is_failed=True)
            build_url = get_build_url(self._manifest, self._component.name, is_failed=True)
        else:
            build_dir = get_build_dir(self._manifest, self._component.name)
            build_url = get_build_url(self._manifest, self._component.name)
        build_root_dir = get_build_dir(self._manifest, self._component.name, link_type='root')

        rotate_dir(build_dir)

        self._log.info('Copy to %s', build_dir)
        self._log.info('Artifacts are available by: %s', build_url)

        # Workaround for copying to samba share on Linux
        # to avoid exceptions while setting Linux permissions.
        _orig_copystat = shutil.copystat
        shutil.copystat = lambda x, y, follow_symlinks=True: x
        shutil.copytree(self._options['PACK_DIR'], build_dir)
        shutil.copystat = _orig_copystat

        if not self._run_build_config_actions(Stage.COPY.value):
            return False

        if build_state['status'] == "PASS":
            last_build_path = build_dir.relative_to(build_root_dir)
            last_build_file = build_dir.parent.parent / f'last_build_{self._component.build_info.product_type}'
            last_build_file.write_text(str(last_build_path))
        return True

    def _strip_bins(self):
        """
        Strip binaries and save debug information

        :return: Boolean
        """
        self._log.info('-' * 80)
        self._log.info(f'Stripping binaries')

        system_os = platform.system()
        if system_os == 'Linux':
            bins_to_strip = []
            binaries_with_error = []
            executable_bin_filter = ['', '.so']
            search_results = self._options['BUILD_DIR'].rglob('*')

            for path in search_results:
                if path.is_file():
                    if os.access(path, os.X_OK) and path.suffix in executable_bin_filter:
                        bins_to_strip.append(path)

            for result in bins_to_strip:
                orig_file = str(result.absolute())
                debug_file = str((result.parent / f'{result.stem}.sym').absolute())
                self._log.debug('-' * 80)
                self._log.debug(f'Stripping {orig_file}')

                strip_commands = OrderedDict([
                    ('copy_debug', ['objcopy', '--only-keep-debug', orig_file, debug_file]),
                    ('strip', ['strip', '--strip-debug', '--strip-unneeded',
                               '--remove-section=.comment', orig_file]),
                    ('add_debug_link', ['objcopy', f'--add-gnu-debuglink={debug_file}', orig_file]),
                    ('set_chmod', ['chmod', '-x', debug_file])
                ])
                check_binary_command = f'file {orig_file} | grep ELF'

                for command in strip_commands.values():
                    err, out = cmd_exec(command, shell=False, log=self._log, verbose=False)
                    if err:
                        # Not strip file if it is not binary
                        return_code, _ = cmd_exec(check_binary_command, shell=True,
                                                  log=self._log, verbose=False)
                        if return_code:
                            self._log.warning(f"File {orig_file} is not binary")
                            break

                        if orig_file not in binaries_with_error:
                            binaries_with_error.append(orig_file)
                        self._log.error(out)
                        continue

            if binaries_with_error:
                self._log.error('Stripping for next binaries was failed. '
                                'See full log for details:\n%s',
                                '\n'.join(binaries_with_error))
                return False
        elif system_os == 'Windows':
            pass
        else:
            self._log.error(f'Can not strip binaries on {system_os}')
            return False
        return True

    def _get_api_version(self, repo_name):
        """
        Get major and minor API version for Windows build from mfxdefs.h
        Used for windows weekly build

        :param repo_name: name of repository
        :type repo_name: string

        :return: minor API version, major API version
        :rtype: Tuple
        """
        # TODO: update for using in linux closed and open source builds
        major_version = minor_version = "0"
        header_name = 'mfxdefs.h'
        mfxdefs_path = self._options['REPOS_DIR'] / repo_name / 'include' / header_name
        if mfxdefs_path.exists():
            is_major_version_found = False
            is_minor_version_found = False
            with open(mfxdefs_path, 'r') as lines:
                for line in lines:
                    major_version_pattern = re.search(r'MFX_VERSION_MAJOR\s(\d+)', line)
                    if major_version_pattern:
                        major_version = major_version_pattern.group(1)
                        is_major_version_found = True
                        continue

                    minor_version_pattern = re.search(r'MFX_VERSION_MINOR\s(\d+)', line)
                    if minor_version_pattern:
                        minor_version = minor_version_pattern.group(1)
                        is_minor_version_found = True

            if not is_major_version_found:
                self._log.warning(f'MFX_VERSION_MAJOR does not exist')
            if not is_minor_version_found:
                self._log.warning(f'MFX_VERSION_MINOR does not exist')
        else:
            self._log.warning(f'{header_name} does not exist')

        self._log.info(f'Returned versions: MAJOR {major_version}, MINOR {minor_version}')
        return major_version, minor_version

    def _update_config(self, pkgconfig_dir, update_data, copy_to=None, pattern='*.pc'):
        """
        Change prefix in pkgconfigs

        :param pkgconfig_dir: Path to package config directory
        :type: pathlib.Path
        :param update_data: new data to write to pkgconfigs
        :type: dict
        :param copy_to: optional parameter for creating new dir for pkgconfigs
        :type: String

        :return: Flag whether files were successfully modified
        """
        # Create new dir for pkgconfigs
        if copy_to:
            try:
                copytree(pkgconfig_dir, copy_to)
                pkgconfig_dir = copy_to
                self._log.debug(f"update_config: pkgconfigs were copied from {pkgconfig_dir} to {copy_to}")
            except OSError:
                self._log.error(f"update_config: Failed to copy package configs from {pkgconfig_dir} to {copy_to}")
                raise

        files_list = pkgconfig_dir.glob(pattern)
        for pkgconfig in files_list:
            with pkgconfig.open('r+') as conf_file:
                self._log.debug(f"update_config: Start updating {pkgconfig}")
                try:
                    current_config_data = conf_file.readlines()
                    conf_file.seek(0)
                    conf_file.truncate()
                    for line in current_config_data:
                        for pattern, data in update_data.items():
                            line = re.sub(pattern, data, line)
                        conf_file.write(line)
                    self._log.debug(f"update_config: {pkgconfig} is updated")
                except OSError:
                    self._log.error(f"update_config: Failed to update package config: {pkgconfig}")
                    raise

    def _get_dependencies(self):
        deps = self._config_variables.get("DEPENDENCIES", {})
        if not deps:
            return True

        try:
            deps_dir = self._options['DEPENDENCIES_DIR']
            self._log.info(f'Dependencies was found. Trying to extract to {deps_dir}')
            deps_dir.mkdir(parents=True, exist_ok=True)

            self._log.info(f'Creating manifest')
            for dependency in deps:
                self._log.info(f'Getting component {dependency}')
                comp = self._manifest.get_component(dependency)
                if comp:
                    try:
                        dep_dir = get_build_dir(self._manifest, dependency)
                        # TODO: Extension hardcoded for open source. Need to use only .zip in future.
                        dep_pkg = dep_dir / f'install_pkg.tar.gz'
                        self._log.info(f'Extracting {dep_pkg}')
                        extract_archive(dep_pkg, deps_dir / dependency)
                    except Exception:
                        self._log.exception('Can not extract archive')
                        return False
                else:
                    self._log.error(f'Component {dependency} does not exist in manifest')
                    return False
        except Exception:
            self._log.exception('Exception occurred:')
            return False
        return True
def extract_private_infrastructure(root_dir, branch, commit_id, commit_time, manifest):
    log = logging.getLogger('extract_repo.extract_private_infrastructure')

    infrastructure_root_dir = root_dir / 'infrastructure'
    configs_root_dir = root_dir / 'product-configs'
    # We save and update repos in temporary folder and create infrastructure package from it
    # So, not needed extracting repo to the beginning each time
    original_repos_dir = root_dir / 'tmp_infrastructure'

    repos = MediaSdkDirectories()
    open_source_product_configs_repo = repos.open_source_product_configs_repo
    open_source_infra_repo = repos.open_source_infrastructure_repo
    closed_source_product_configs_repo = repos.closed_source_product_configs_repo
    closed_source_infra_repo = repos.closed_source_infrastructure_repo

    # Extract open source infrastructure and product configs
    extract_open_source_infrastructure(original_repos_dir, branch, commit_id, commit_time, manifest)

    # Extract closed source product configs
    extract_repo(root_repo_dir=original_repos_dir,
                 repo_name=closed_source_product_configs_repo,
                 branch='master',
                 commit_time=commit_time)

    manifest_path = original_repos_dir / closed_source_product_configs_repo / 'manifest.yml'
    manifest_data = Manifest(manifest_path)
    closed_source_infra = manifest_data.get_component('infra').get_repository(
        closed_source_infra_repo)

    # Extract closed source infrastructure
    extract_repo(root_repo_dir=original_repos_dir,
                 repo_name=closed_source_infra.name,
                 branch=closed_source_infra.branch,
                 commit_id=closed_source_infra.revision)

    # Event repository in infra component for private builds is product-configs,
    # so need to change default trigger value
    manifest_data.get_component('infra').build_info.set_trigger(open_source_product_configs_repo)
    manifest_data.save_manifest(manifest_path)

    log.info('-' * 50)
    log.info(f"Create infrastructure package")
    try:
        log.info(f"- Delete existing infrastructure")
        if infrastructure_root_dir.exists():
            remove_directory(str(infrastructure_root_dir))
        if configs_root_dir.exists():
            remove_directory(str(configs_root_dir))

        log.info(f"- Copy open source infrastructure")
        copy_tree(str(original_repos_dir / open_source_infra_repo), str(infrastructure_root_dir))

        log.info(f"- Copy closed source infrastructure")
        copy_tree(str(original_repos_dir / closed_source_infra_repo), str(infrastructure_root_dir))

        log.info(f"- Remove closed source static data")
        (infrastructure_root_dir / 'common' / 'static_closed_data.py').unlink()

        log.info(f"- Copy open source product configs")
        copy_tree(str(original_repos_dir / open_source_product_configs_repo), str(configs_root_dir))

        log.info(f"- Copy closed source product configs")
        copy_tree(str(original_repos_dir / closed_source_product_configs_repo), str(configs_root_dir))

        # log.info(f"Copy secrets")
        shutil.copyfile(str(pathlib.Path('msdk_secrets.py').absolute()),
                        str(infrastructure_root_dir / 'common' / 'msdk_secrets.py'))
    except Exception:
        log.exception('Can not create infrastructure package')
        exit_script(ErrorCode.CRITICAL)
def extract_closed_source_infrastructure(root_dir, branch, commit_id, commit_time, manifest):
    log = logging.getLogger('extract_repo.extract_closed_source_infrastructure')

    infrastructure_root_dir = root_dir / 'infrastructure'
    configs_root_dir = root_dir / 'product-configs'
    # We save and update repos in temporary folder and create infrastructure package from it
    # So, not needed extracting repo to the beginning each time
    original_repos_dir = root_dir / 'tmp_infrastructure'

    repos = MediaSdkDirectories()
    closed_source_product_configs_repo = repos.closed_source_product_configs_repo
    open_source_infra_repo = repos.open_source_infrastructure_repo
    closed_source_infra_repo = repos.closed_source_infrastructure_repo

    # Extract product configs
    if not manifest:
        extract_repo(root_repo_dir=original_repos_dir,
                     repo_name=closed_source_product_configs_repo,
                     branch=branch,
                     commit_id=commit_id,
                     commit_time=commit_time)
        manifest_data = Manifest(original_repos_dir / closed_source_product_configs_repo / 'manifest.yml')
    else:
        manifest_data = Manifest(manifest)
        product_conf = manifest_data.get_component('infra').get_repository(
            closed_source_product_configs_repo)
        extract_repo(root_repo_dir=original_repos_dir,
                     repo_name=product_conf.name,
                     branch=product_conf.branch,
                     commit_id=product_conf.revision,
                     commit_time=commit_time)

    open_source_infra = manifest_data.get_component('infra').get_repository(
        open_source_infra_repo)
    closed_source_infra = manifest_data.get_component('infra').get_repository(
        closed_source_infra_repo)

    # Extract open source infrastructure
    # Set proxy for access to GitHub
    extract_repo(root_repo_dir=original_repos_dir,
                 repo_name=open_source_infra.name,
                 branch=open_source_infra.branch,
                 commit_id=open_source_infra.revision,
                 proxy=True)

    # Extract closed source part of infrastructure
    extract_repo(root_repo_dir=original_repos_dir,
                 repo_name=closed_source_infra.name,
                 branch=closed_source_infra.branch,
                 commit_id=closed_source_infra.revision)

    log.info('-' * 50)
    log.info(f"Create infrastructure package")
    try:
        log.info(f"- Delete existing infrastructure")
        if infrastructure_root_dir.exists():
            remove_directory(str(infrastructure_root_dir))
        if configs_root_dir.exists():
            remove_directory(str(configs_root_dir))

        log.info(f"- Copy open source infrastructure")
        copy_tree(str(original_repos_dir / open_source_infra_repo), str(infrastructure_root_dir))

        log.info(f"- Copy closed source infrastructure")
        copy_tree(str(original_repos_dir / closed_source_infra_repo), str(infrastructure_root_dir))

        log.info(f"- Copy product configs")
        copy_tree(str(original_repos_dir / closed_source_product_configs_repo), str(configs_root_dir))

        # log.info(f"Copy secrets")
        shutil.copyfile(str(pathlib.Path('msdk_secrets.py').absolute()),
                        str(infrastructure_root_dir / 'common' / 'msdk_secrets.py'))
    except Exception:
        log.exception('Can not create infrastructure package')
        exit_script(ErrorCode.CRITICAL)
class ManifestRunner:
    """
    Prepare manifest
    """

    def __init__(self, root_dir, repo, branch, revision, target_branch, build_event, commit_time):
        """
        :param root_dir: Directory where repositories will be extracted
        :type root_dir: String
        :param repo: Repository name
        :type repo: Repository name
        :param branch: Branch name
        :type branch: Branch name
        :param revision: Revision of a commit
        :type revision: Revision of a commit
        :param target_branch: Target branch name
        :type target_branch: Target branch name
        :param build_event: Event of a build
        :type build_event: Event of a build
        :param commit_time: Time to slice revisions
        :type commit_time: Time to slice revisions
        """
        self._release_repos = ['product-configs', 'MediaSDK', 'media-driver']
        self._root_dir = pathlib.Path(root_dir)
        self._repo = repo
        self._branch = branch
        self._revision = revision
        self._target_branch = target_branch
        self._build_event = build_event
        self._commit_time = datetime.strptime(commit_time, '%Y-%m-%d %H:%M:%S') \
            if commit_time else None
        self._manifest = Manifest(
            pathlib.Path(__file__).resolve().parents[2] / 'product-configs' / 'manifest.yml')
        self._release_branch = {}
        self._updated_repos = None
        self._log = logging.getLogger(self.__class__.__name__)

    def _check_branch(self):
        """
        Check release branch
        """
        self._log.info('Checking release branch')

        if self._target_branch:
            branch_to_check = self._target_branch
        else:
            branch_to_check = self._branch

        if MediaSdkDirectories.is_release_branch(branch_to_check):
            sdk_br, driver_br = convert_branch(branch_to_check)
            for repo_name in self._release_repos:
                if repo_name == 'media-driver':
                    self._release_branch[repo_name] = driver_br
                else:
                    self._release_branch[repo_name] = sdk_br

    def _extract_repos(self):
        """
        Extract and slice repositories
        """
        self._log.info('Extracting repositories')

        sources_list = {}
        for component in self._manifest.components:
            for repo in component.repositories:
                if repo.name == self._repo:
                    sources_list[repo.name] = {
                        'branch': self._branch,
                        'target_branch': self._target_branch,
                        'commit_id': self._revision,
                        'is_trigger': True,
                        'url': repo.url
                    }
                else:
                    if repo.revision is not None:
                        continue
                    sources_list[repo.name] = {
                        'branch': self._release_branch.get(repo.name, repo.branch),
                        'target_branch': repo.target_branch,
                        'commit_id': repo.revision,
                        'is_trigger': False,
                        'url': repo.url
                    }

        states = ProductState(sources_list, self._root_dir, self._commit_time)
        states.extract_all_repos()
        self._updated_repos = {state.repo_name: state for state in states.repo_states}

    def _update_manifest(self):
        """
        Update manifest from extracted product-configs repo
        """
        self._log.info('Updating manifest')

        for component in self._manifest.components:
            for repo in component.repositories:
                if repo.name == self._repo:
                    component.build_info.set_build_event(self._build_event)
                    component.build_info.set_trigger(repo.name)
                    self._manifest.set_event_component(component.name)
                    self._manifest.set_event_repo(repo.name)
                if repo.name in self._updated_repos:
                    upd_repo = Repository(
                        self._updated_repos[repo.name].repo_name,
                        self._updated_repos[repo.name].url,
                        self._updated_repos[repo.name].branch_name,
                        self._updated_repos[repo.name].target_branch,
                        self._updated_repos[repo.name].commit_id,
                        str(self._updated_repos[repo.name].repo.commit().committed_datetime.astimezone()))
                    component.add_repository(upd_repo, replace=True)

    def _save_manifest(self):
        """
        Save updated manifest
        """
        self._log.info('Saving manifest')

        component_name = self._manifest.event_component.name
        manifest_path = get_build_dir(self._manifest, component_name,
                                      link_type='manifest') / 'manifest.yml'
        manifest_url = get_build_url(self._manifest, component_name,
                                     link_type='manifest') + '/manifest.yml'
        self._manifest.save_manifest(manifest_path)
        self._log.info(f'Manifest was saved to: %s', manifest_path)
        self._log.info(f'Manifest is available by link: %s', manifest_url)

    def run(self):
        """
        Execute manifest creating process
        """
        self._check_branch()
        self._extract_repos()
        self._update_manifest()
        self._save_manifest()
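A construction-and-run sketch for ManifestRunner, using only the constructor parameters documented above and the public run() method; all argument values are hypothetical placeholders.

# Hypothetical invocation of ManifestRunner (placeholder argument values).
runner = ManifestRunner(
    root_dir='/tmp/manifest_workdir',          # where repositories will be extracted
    repo='MediaSDK',                           # repository that triggered the build
    branch='master',
    revision='HEAD',
    target_branch=None,                        # set for pre-commit builds against a target branch
    build_event='commit',
    commit_time='2019-01-01 00:00:00')         # optional slice time, format '%Y-%m-%d %H:%M:%S'
runner.run()  # check branch -> extract repos -> update manifest -> save manifest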
class TestRunner(ConfigGenerator):
    """
    Main class.
    Contains commands for testing product.
    """

    def __init__(self, root_dir, test_config, manifest, component, current_stage,
                 product_type=None, build_type=None, custom_types=None):
        self._manifest = None
        self._infrastructure_path = pathlib.Path(__file__).resolve().parents[1]
        self._manifest = Manifest(manifest)
        self._component = self._manifest.get_component(component)
        self._default_stage = TestStage.TEST.value
        self._artifacts_layout = None
        self._product_type = product_type

        # TODO: create mapper for all tests combinations of components in product-configs
        if build_type:
            self._component.build_info.set_build_type(build_type)
        if custom_types:
            for comp, prod_type in custom_types.items():
                self._manifest.get_component(comp).build_info.set_product_type(prod_type)

        super().__init__(root_dir, test_config, current_stage)

    def _update_global_vars(self):
        self._global_vars.update({
            'stage': TestStage,
            'infra_path': self._infrastructure_path
        })

    def _get_config_vars(self):
        if 'ARTIFACTS_LAYOUT' in self._config_variables:
            self._artifacts_layout = self._config_variables['ARTIFACTS_LAYOUT']

    def _run_build_config_actions(self, stage):
        """
        Run actions of selected stage

        :param stage: Stage name
        :type stage: String

        :return: Boolean
        """
        is_passed = True
        for action in self._actions[stage]:
            error_code = action.run(self._options)
            if error_code:
                is_passed = False
        return is_passed

    def _clean(self):
        """
        Clean build directories

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("CLEANING")

        remove_dirs = {'ROOT_DIR'}
        for directory in remove_dirs:
            dir_path = self._options.get(directory)
            if dir_path.exists():
                self._log.info(f'remove directory {dir_path}')
                shutil.rmtree(dir_path)

        self._options["LOGS_DIR"].mkdir(parents=True, exist_ok=True)

        if not self._run_build_config_actions(TestStage.CLEAN.value):
            return False
        return True

    def _install(self):
        self._log.info('-' * 50)
        self._log.info("INSTALLING")

        components = self._config_variables.get('INSTALL', [])
        if components and not install_components(self._manifest, components):
            return False

        if not self._run_build_config_actions(TestStage.INSTALL.value):
            return False
        return True

    def _test(self):
        self._log.info('-' * 50)
        self._log.info("TESTING")

        is_success = True
        if not self._run_build_config_actions(TestStage.TEST.value):
            is_success = False
        return is_success

    def _copy(self):
        self._log.info('-' * 50)
        self._log.info("COPYING")

        if self._product_type:
            self._component.build_info.set_product_type(self._product_type)
        artifacts_dir = get_test_dir(self._manifest, self._component.name)
        artifacts_url = get_test_url(self._manifest, self._component.name)
        rotate_dir(artifacts_dir)

        if self._artifacts_layout:
            _orig_copystat = shutil.copystat
            shutil.copystat = lambda x, y, follow_symlinks=True: x
            for local_path, share_dir in self._artifacts_layout.items():
                local_path = pathlib.Path(local_path).resolve()
                if local_path.is_dir():
                    shutil.copytree(local_path, artifacts_dir / share_dir,
                                    ignore=shutil.ignore_patterns('bin'))
                elif local_path.is_file():
                    shutil.copyfile(local_path, artifacts_dir / share_dir)
            shutil.copystat = _orig_copystat

            self._log.info(f'Artifacts copied to: {artifacts_dir}')
            self._log.info(f'Artifacts available by link: {artifacts_url}')
        else:
            self._log.info('Nothing to copy')

        if not self._run_build_config_actions(TestStage.COPY.value):
            return False
        return True
def __init__(self, build_config_path, root_dir, build_type, product_type, build_event,
             stage, commit_time=None, changed_repo=None, repo_states_file_path=None,
             target_arch=None, custom_cli_args=None, target_branch=None,
             manifest_file=None, component_name=None):
    """
    :param build_config_path: Path to build configuration file
    :type build_config_path: pathlib.Path
    :param root_dir: Main directory for product building
    :type root_dir: pathlib.Path
    :param build_type: Type of build (release|debug)
    :type build_type: String
    :param product_type: Type of product (linux|linux_embedded|linux_pre_si|windows)
    :type product_type: String
    :param build_event: Event of build (pre_commit|commit|nightly|weekly)
    :type build_event: String
    :param stage: Build stage
    :type stage: String
    :param commit_time: Time for getting slice of commits of repositories
    :type commit_time: datetime
    :param changed_repo: Information about changed source repository
    :type changed_repo: String
    :param repo_states_file_path: Path to sources file with revisions of repositories
                                  to reproduce the same build
    :type repo_states_file_path: String
    :param target_arch: Architecture of target platform
    :type target_arch: List
    :param custom_cli_args: Dict of custom command line arguments (ex. 'arg': 'value')
    :type custom_cli_args: Dict
    """
    self._default_stage = Stage.BUILD.value
    super().__init__(root_dir, build_config_path, stage)
    self._product_repos = {}
    self._product = None
    self._product_type = product_type
    self._build_event = build_event
    self._commit_time = commit_time
    self._changed_repo = changed_repo
    self._repo_states = None
    self._build_state_file = root_dir / "build_state"

    self._options.update({
        "REPOS_DIR": root_dir / "repos",
        "BUILD_DIR": root_dir / "build",
        "INSTALL_DIR": root_dir / "install",
        "PACK_DIR": root_dir / "pack",
        "DEPENDENCIES_DIR": root_dir / "dependencies",
        "BUILD_TYPE": build_type,  # sets from command line argument ('release' by default)
        "CPU_CORES": multiprocessing.cpu_count(),  # count of logical CPU cores
        "VARS": {},  # Dictionary of dynamical variables for action() steps
        "ENV": {},  # Dictionary of dynamical environment variables
        "STRIP_BINARIES": False,  # Flag for stripping binaries of build
    })
    self._dev_pkg_data_to_archive = []
    self._install_pkg_data_to_archive = []
    self._custom_cli_args = custom_cli_args
    self._target_arch = target_arch
    self._target_branch = target_branch
    self._manifest_file = manifest_file

    manifest_path = pathlib.Path(manifest_file) if manifest_file \
        else self._config_path.parent / 'manifest.yml'
    if manifest_path.exists():
        self._manifest = Manifest(manifest_path)
    else:
        self._manifest = Manifest()
        self._log.warning('Created empty manifest.')

    if changed_repo:
        changed_repo_dict = changed_repo.split(':')
        self._branch_name = changed_repo_dict[1]
        self._changed_repo_name = changed_repo_dict[0]
    elif repo_states_file_path:
        self._branch_name = 'master'
        repo_states_file = pathlib.Path(repo_states_file_path)
        if repo_states_file.exists():
            with repo_states_file.open() as repo_states_json:
                self._repo_states = json.load(repo_states_json)
                for repo_name, repo_state in self._repo_states.items():
                    if repo_state['trigger']:
                        self._branch_name = repo_state['branch']
                        self._changed_repo_name = repo_name
                        self._target_branch = repo_state.get('target_branch')
                        break
        else:
            raise Exception(f'{repo_states_file} does not exist')
    elif manifest_file:
        component = self._manifest.get_component(component_name)
        repo = component.trigger_repository
        self._branch_name = repo.branch
        self._changed_repo_name = repo.name
        self._target_branch = repo.target_branch
    else:
        self._branch_name = 'master'
        self._changed_repo_name = None
class TestRunner(ConfigGenerator):
    """
    Main class.
    Contains commands for testing product.
    """

    def __init__(self, root_dir, test_config, manifest, component, current_stage,
                 product_type=None, build_type=None, custom_types=None):
        self._manifest = None
        self._infrastructure_path = pathlib.Path(__file__).resolve().parents[1]
        self._manifest = Manifest(manifest)
        self._component = self._manifest.get_component(component)
        self._default_stage = TestStage.TEST.value
        self._artifacts_layout = None
        self._product_type = product_type

        # TODO: create mapper for all tests combinations of components in product-configs
        if build_type:
            self._component.build_info.set_build_type(build_type)
        if custom_types:
            for comp, prod_type in custom_types.items():
                self._manifest.get_component(comp).build_info.set_product_type(prod_type)

        super().__init__(root_dir, test_config, current_stage)
        self._options.update({"REPOS_DIR": root_dir / "repos"})
        self._product_repos = []

    def _update_global_vars(self):
        self._global_vars.update({
            'stage': TestStage,
            'infra_path': self._infrastructure_path,
            'PATH': os.environ["PATH"]
        })

    def _get_config_vars(self):
        if 'ARTIFACTS_LAYOUT' in self._config_variables:
            self._artifacts_layout = self._config_variables['ARTIFACTS_LAYOUT']
        if 'PRODUCT_REPOS' in self._config_variables:
            for repo in self._config_variables['PRODUCT_REPOS']:
                self._product_repos.append(repo['name'])

    def _run_build_config_actions(self, stage):
        """
        Run actions of selected stage

        :param stage: Stage name
        :type stage: String

        :return: Boolean
        """
        is_passed = True
        for action in self._actions[stage]:
            error_code = action.run(self._options)
            if error_code:
                is_passed = False
        return is_passed

    def _clean(self):
        """
        Clean build directories

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("CLEANING")

        remove_dirs = {'LOGS_DIR'}
        for directory in remove_dirs:
            dir_path = self._options.get(directory)
            if dir_path.exists():
                self._log.info(f'remove directory {dir_path}')
                shutil.rmtree(dir_path)

        self._options["LOGS_DIR"].mkdir(parents=True, exist_ok=True)

        if not self._run_build_config_actions(TestStage.CLEAN.value):
            return False
        return True

    def _extract(self):
        """
        Get and prepare build repositories
        Uses git_worker.py module

        :return: None | Exception
        """
        self._log.info('-' * 50)
        self._log.info("EXTRACTING")

        self._options['REPOS_DIR'].mkdir(parents=True, exist_ok=True)

        repo_states = collections.defaultdict(dict)
        for repo in self._component.repositories:
            if not self._product_repos or repo.name in self._product_repos:
                repo_states[repo.name]['target_branch'] = repo.target_branch
                repo_states[repo.name]['branch'] = repo.branch
                repo_states[repo.name]['commit_id'] = repo.revision
                repo_states[repo.name]['url'] = repo.url
                repo_states[repo.name]['trigger'] = repo.name == self._component.build_info.trigger

        product_state = ProductState(repo_states, self._options["REPOS_DIR"])
        product_state.extract_all_repos()

        if not self._run_build_config_actions(TestStage.EXTRACT.value):
            return False
        return True

    def _install(self):
        self._log.info('-' * 50)
        self._log.info("INSTALLING")

        components = self._config_variables.get('INSTALL', [])
        if components and not install_components(self._manifest, components):
            return False

        if not self._run_build_config_actions(TestStage.INSTALL.value):
            return False
        return True

    def _test(self):
        self._log.info('-' * 50)
        self._log.info("TESTING")

        is_success = True
        if not self._run_build_config_actions(TestStage.TEST.value):
            is_success = False
        return is_success

    def _copy(self):
        self._log.info('-' * 50)
        self._log.info("COPYING")

        if self._product_type:
            self._component.build_info.set_product_type(self._product_type)
        artifacts_dir = get_test_dir(self._manifest, self._component.name)
        artifacts_url = get_test_url(self._manifest, self._component.name)
        rotate_dir(artifacts_dir)

        if self._artifacts_layout:
            _orig_copystat = shutil.copystat
            shutil.copystat = lambda x, y, follow_symlinks=True: x
            for local_path, share_dir in self._artifacts_layout.items():
                local_path = pathlib.Path(local_path).resolve()
                if local_path.is_dir():
                    shutil.copytree(local_path, artifacts_dir / share_dir,
                                    ignore=shutil.ignore_patterns('bin'))
                elif local_path.is_file():
                    shutil.copyfile(local_path, artifacts_dir / share_dir)
            shutil.copystat = _orig_copystat

            self._log.info(f'Artifacts copied to: {artifacts_dir}')
            self._log.info(f'Artifacts available by link: {artifacts_url}')
        else:
            self._log.info('Nothing to copy')

        if not self._run_build_config_actions(TestStage.COPY.value):
            return False
        return True