def test_subprocess_rule_error(self, capture):
    """Verify that SubprocessRule raises RuleError for failing commands."""
    # Two failure modes must both surface as RuleError:
    # a command that does not exist, and one that exits non-zero.
    for failing_command in (['abcdefghijk'], ['false']):
        with self.assertRaises(RuleError):
            SubprocessRule(
                failing_command,
                stdout_writer=logging.info,
                stderr_writer=logging.warning)()
def test_subprocess_rule(self, capture):
    """This function tests behaviour of the class buildrules.common.rule.SubprocessRule."""
    # Command with no arguments: only the rule's own log line is expected.
    sp1 = SubprocessRule(
        ['echo'],
        stdout_writer=logging.info,
        stderr_writer=logging.warning)()
    # Command with arguments: stdout ("a b") is forwarded to logging.info,
    # which logs under the 'root' logger.
    sp2 = SubprocessRule(
        ['echo', 'a', 'b'],
        stdout_writer=logging.info,
        stderr_writer=logging.warning)()
    # shell=True with an env dict: the shell expands $TEST from env.
    sp3 = SubprocessRule(
        ['echo $TEST'], {'TEST': 'test'},
        shell=True,
        stdout_writer=logging.info,
        stderr_writer=logging.warning)()
    # Expected log records in order (logger name, level, message).
    capture.check(
        (
            'SubprocessRule',
            'INFO',
            'Running SubprocessRule: { sp_function: '
            "echo, "
            'env: None, '
            'shell: False }'
        ),
        (
            'SubprocessRule',
            'INFO',
            'Running SubprocessRule: { sp_function: '
            "echo a b, "
            'env: None, '
            'shell: False }'
        ),
        (
            'root',
            'INFO',
            "a b"
        ),
        (
            'SubprocessRule',
            'INFO',
            'Running SubprocessRule: { sp_function: '
            "echo $TEST, "
            "env: {'TEST': 'test'}, "
            'shell: True }'
        ),
        (
            'root',
            'INFO',
            "test"
        ),
    )
def _get_rsync_deployment_command(self, dry_run=False):
    """Construct the rsync deployment rule from the deployer config.

    Args:
        dry_run (bool): Accepted for interface compatibility with other
            deployment helpers; currently unused in this method —
            TODO(review): wire to rsync --dry-run or remove.

    Returns:
        SubprocessRule: Rule that executes the assembled rsync command
        (shell=True, run from the configured working directory).
    """
    # Defaults overlaid with the instance-specific deployer settings.
    rsync_deployer_config = self.DEFAULT_CONFIGS.copy()
    rsync_deployer_config.update(**self._deployer_config)

    cmd = ['rsync']
    # split() with no argument collapses runs of whitespace; the
    # previous split(' ') produced empty '' tokens when the configured
    # flags contained consecutive spaces.
    rsync_flags = rsync_deployer_config['rsync_flags'].split()
    cmd.extend(rsync_flags)
    if rsync_deployer_config['chmod_options']:
        cmd.append('--chmod={0}'.format(
            rsync_deployer_config['chmod_options']))
    ssh_command = rsync_deployer_config['ssh_command']
    if rsync_deployer_config['use_ssh']:
        cmd.extend(['-e', ssh_command])
    if rsync_deployer_config['delete']:
        cmd.append('--delete')
    rsync_cwd = rsync_deployer_config['working_directory']
    if rsync_cwd:
        # Make the source relative to the working directory so the
        # destination layout mirrors the source layout.
        src = os.path.relpath(rsync_deployer_config['source'], rsync_cwd)
    else:
        src = rsync_deployer_config['source']
    target = '"{0}"'.format(rsync_deployer_config['dest'])
    target_host = rsync_deployer_config.get('target_host', None)
    if target_host:
        target = '{0}:{1}'.format(target_host, target)
    # Trailing slash: rsync copies the *contents* of src, not src itself.
    src = '"{0}/"'.format(src)
    return SubprocessRule(cmd + [src, target], shell=True, cwd=rsync_cwd)
def get_compiler_find_rule(spec_list):
    # Locate the spec's install prefix with `spack find -p`, extract it
    # with tail/awk, and feed it (if any) into `spack compiler add`.
    find_cmd = self._spack_cmd + ['find', '-p'] + spec_list
    extract_pipeline = [
        '|', 'tail', '-n', '1',
        '|', 'awk', "'{", 'print', '$2', "}'",
        '|', 'xargs', '-r'
    ]
    register_cmd = self._spack_cmd + ['compiler', 'add']
    return SubprocessRule(
        find_cmd + extract_pipeline + register_cmd,
        shell=True,
        check=False)
def _get_rules(self):
    """Return the build rules for this builder."""
    example_rule = PythonRule(example_function, [0, 0], {})
    echo_rule = SubprocessRule(['echo', 'test'])
    return [example_rule, echo_rule]
def _get_package_install_rule(self, package_config):
    """Create the `spack install` rule for a single package spec."""
    spec_text = self._get_spec_string(package_config)
    specs = self._get_spec_list(package_config)
    extras = self._get_extra_flags(package_config)
    arch_opts = self._get_target_architecture_flags(package_config)
    self._logger.debug(
        msg='Creating package install rule for spec: {0}'.format(spec_text))
    # Verbose install with any extra and target-architecture flags.
    install_cmd = self._spack_cmd + ['install', '-v']
    return SubprocessRule(install_cmd + extras + specs + arch_opts)
def _get_image_install_rules(self):
    """Return build rules that install singularity images.

    For each definition in the build config, if the image is not yet in
    the installed-images registry, prepare installation paths, run the
    installer into the staging path and list the installed images.

    Returns:
        list: Rules to execute in order.
    """
    rules = []
    installed_images = self._get_installed_images()
    # Keep only system paths (/usr...) from the current PATH.
    env_path = list(filter(
        lambda x: re.search('^/usr', x),
        os.getenv('PATH').split(':')))
    for definition in self._confreader['build_config']['definitions']:
        config = self._create_image_config(definition)
        installer = self._get_installer_path(config)
        stage_path = self._get_stage_path(config)
        # NOTE(review): install_path is computed but not used below — confirm.
        install_path = self._get_install_path(config)
        if config['name'] not in installed_images:
            # Prepend the staged bin dir so freshly installed tools win.
            env_path_image = {
                'PATH': ':'.join([os.path.join(stage_path, 'bin')] + env_path)
            }
            rules.extend([
                LoggingRule((
                    "Image {{name}} not found.\n"
                    "Installing singularity image '{name}' with "
                    "module '{image_name}'").format(**config)),
                PythonRule(
                    self._prepare_installation_paths,
                    [config['module_name'], config['module_version']]),
                SubprocessRule(['bash', installer, '-b', '-p', stage_path],
                               shell=True),
            ])
            # NOTE(review): indentation reconstructed — this rule is assumed
            # to belong inside the not-installed branch, since env_path_image
            # is only defined there; confirm against the original file.
            rules.extend([
                SubprocessRule(
                    ['singularity', 'list'],
                    env=env_path_image,
                    shell=True)
            ])
            #rules.append(
            #    PythonRule(
            #        self._update_installed_images,
            #        [config['name'], config]))
    return rules
def _get_package_install_rule(self, package_config):
    """Create the `spack install` rule for a single package spec."""
    spec_text = self._get_spec_string(package_config)
    specs = self._get_spec_list(package_config)
    extras = self._get_extra_flags(package_config)
    build_env = self._get_build_environment()
    # Pass --reuse only when the build options request package reuse.
    if self._build_options.get('reuse_packages', False):
        reuse_flags = ['--reuse']
    else:
        reuse_flags = []
    self._logger.debug(
        msg='Creating package install rule for spec: {0}'.format(spec_text))
    install_cmd = (self._spack_cmd + ['install', '-v'] + reuse_flags
                   + extras + specs)
    return SubprocessRule(install_cmd, env=build_env)
def _get_clone_build_environment_rule(self):
    """Clones build environment into a temporary directory"""
    # Nothing to do when the build folder already exists.
    if os.path.isdir(self._build_folder):
        return []
    repo_url = self._confreader['build_config'][
        'build_environment_repository']
    clone_cmd = [
        'git', 'clone', '--depth=1', repo_url, self._build_folder, '2>&1'
    ]
    return [
        LoggingRule('Cloning build environment repository'),
        SubprocessRule(clone_cmd, shell=True),
    ]
def test_subprocess_dry_run(self, capture):
    # In dry-run mode the rule only logs the command it would execute;
    # the subprocess itself must not run, so no 'root' stdout record
    # is expected in the capture.
    sp = SubprocessRule(
        ['echo', 'test'],
        stdout_writer=logging.info,
        stderr_writer=logging.warning)(dry_run=True)
    capture.check(
        (
            'SubprocessRule',
            'INFO',
            'Running SubprocessRule: { sp_function: echo test, env: None, shell: False '
            '}'
        ),
    )
def _get_rules(self):
    """Return rules where the middle rule (`false`) always fails."""
    return [
        PythonRule(self.good_function, [], {}),
        SubprocessRule(['false']),
        PythonRule(self.good_function2, [], {}),
    ]
def _get_compiler_install_rules(self):
    """Build rules that register and install the configured compilers.

    Order: reset compilers.yml, register default and already-present
    compilers, then install missing ones and register those as well.

    Returns:
        list: Rules to execute in order.
    """
    rules = []

    self._logger.debug(msg='Parsing rules for compilers:')

    compiler_packages = self._confreader['build_config']['compilers']

    rules.append(LoggingRule('Removing old compilers.yml'))
    rules.append(PythonRule(self._remove_compilers_file))

    def get_compiler_find_rule(spec_list):
        # Locate the spec's install prefix via `spack find -p`, extract
        # it with tail/awk and pass it to `spack compiler add`.
        # `xargs -r` skips the add when the spec is not found, and
        # check=False tolerates a failing pipeline.
        return SubprocessRule(
            (self._spack_cmd + ['find', '-p'] + spec_list + ([
                '|', 'tail', '-n', '1', '|', 'awk', "'{", 'print', '$2',
                "}'", '|', 'xargs', '-r'
            ]) + self._spack_cmd + ['compiler', 'add']),
            shell=True,
            check=False)

    def get_compiler_flags_rule(spec_list, package_config):
        # Apply any user-specified flags to the registered compiler.
        flags = package_config.get('flags', {})
        return PythonRule(self._set_compiler_flags, [spec_list[0], flags])

    rules.extend([
        LoggingRule('Adding default compilers.'),
        SubprocessRule(self._spack_cmd + ['compiler', 'add'], )
    ])

    # Register compilers that are already installed on the system.
    rules.append(LoggingRule('Adding existing compilers.'))
    for package_config in compiler_packages:
        spec_str = self._get_spec_string(package_config)
        spec_list = self._get_spec_list(package_config)
        self._logger.debug(msg='Creating compiler find rule for spec: {0}'.
                           format(spec_str))
        rules.extend([
            get_compiler_find_rule(spec_list),
            get_compiler_flags_rule(spec_list, package_config)
        ])

    # Install (and then register) compilers that are not system-provided.
    rules.append(LoggingRule('Installing compilers.'))
    for package_config in compiler_packages:
        spec_list = self._get_spec_list(package_config)
        if not package_config.get('system_compiler', False):
            rules.extend([
                self._get_package_install_rule(package_config),
                get_compiler_find_rule(spec_list),
                get_compiler_flags_rule(spec_list, package_config)
            ])

    rules.append(PythonRule(self._show_compilers))
    return rules
def _copy_certs(self):
    """Copy configured buildbot TLS key/cert into the build folder, or
    generate a self-signed pair when none is configured, then set modes."""
    fqdn = self._confreader['build_config']['fqdn']
    master_conf = self._confreader['build_config']['buildbot_master']
    private_key = master_conf.get('private_key', None)
    public_cert = master_conf.get('public_cert', None)
    key = os.path.join(self._build_folder, 'certs', 'buildbot.key')
    cert = os.path.join(self._build_folder, 'certs', 'buildbot.crt')
    rules = []
    if private_key and public_cert:
        # Both halves configured: copy them into place.
        rules.append(LoggingRule('Copying certs'))
        rules.append(
            PythonRule(copy_file, args=[master_conf['private_key'], key]))
        rules.append(
            PythonRule(copy_file, args=[master_conf['public_cert'], cert]))
    else:
        # No configured pair: create a one-year self-signed certificate.
        rules.append(
            LoggingRule('Creating self signed certs', self._logger.warning))
        openssl_cmd = [
            'openssl', 'req', '-x509', '-nodes', '-new', '-keyout', key,
            '-out', cert, '-days', '365', '-subj', '/CN=%s' % fqdn
        ]
        rules.append(
            SubprocessRule(openssl_cmd,
                           stderr_writer=self._logger.warning))
    # Private key owner-only; certificate world-readable.
    rules.extend([
        LoggingRule('Setting cert modes'),
        PythonRule(os.chmod, args=[key, 0o600]),
        PythonRule(os.chmod, args=[cert, 0o644])
    ])
    return rules
def _build_image(self, image, definition, sudo=False, fakeroot=False,
                 debug=False, build_env=None):
    """Build a singularity image from *definition* unless *image* exists.

    sudo/fakeroot/debug toggle the corresponding singularity invocation
    flags; build_env is passed to the subprocess (and hidden from logs).
    """
    build_cmd = ['singularity', 'build']
    if debug:
        # -d goes right after the singularity executable name.
        build_cmd.insert(1, '-d')
    if sudo:
        build_cmd.insert(0, 'sudo')
    if fakeroot:
        build_cmd.append('--fakeroot')
    # Skip the build entirely when the image file is already present.
    if os.path.isfile(image):
        return
    SubprocessRule(build_cmd + [image, definition],
                   env=build_env,
                   shell=True,
                   hide_env=True)()
def _get_environment_install_rules(self):
    """Return build rules that install conda environments.

    For every environment in the build config: decide (by checksum)
    whether to skip, update or freshly install, then emit rules that
    download the installer, run it, configure condarc, install conda
    packages, export environment.yml and record the installation.

    Returns:
        list: Rules to execute in order.
    """
    rules = []
    # Environments already recorded as installed (name -> metadata).
    installed_environments = self._get_installed_environments(
    )['environments']
    # Restrict PATH to system locations during installation.
    env_path = list(
        filter(lambda x: re.search('^/usr', x),
               os.getenv('PATH').split(':')))
    for environment in self._confreader['build_config']['environments']:
        config = self._create_environment_config(environment)
        environment_name = config['environment_name']
        # NOTE(review): pip_packages is popped but never used below — confirm.
        pip_packages = config.pop('pip_packages', [])
        conda_packages = config.pop('conda_packages', [])
        condarc = config.pop('condarc', {})
        installer = self._get_installer_path(config)
        install_path = self._get_install_path(config)
        module_path = self._get_module_path(config)
        # Make the new environment's bin dir take precedence on PATH.
        conda_env = {
            'PATH': ':'.join([os.path.join(install_path, 'bin')] + env_path)
        }
        conda_install_cmd = ['conda', 'install', '--yes', '-n', 'base']
        skip_install = False
        update_install = False
        # Compare the recorded checksum against the configured one.
        installed_checksum = installed_environments.get(
            environment_name, {}).get('checksum', '')
        if not installed_checksum:
            install_msg = ("Environment {environment_name} "
                           "not installed. Starting installation.")
        elif installed_checksum != config['checksum']:
            install_msg = ("Environment {environment_name} installed "
                           "but marked for update.")
            update_install = True
        else:
            install_msg = (
                "Environment {environment_name} is already installed. "
                "Skipping installation.")
            skip_install = True
        rules.append(LoggingRule(install_msg.format(**config)))
        if skip_install:
            continue
        # Base installation: fetch installer, clean leftovers, run it.
        rules.extend([
            PythonRule(self._download_installer, [config]),
            PythonRule(self._clean_failed, [install_path]),
            PythonRule(
                self._makedirs,
                [install_path, 0o755],
            ),
            SubprocessRule(
                ['bash', installer, '-f', '-b', '-p', install_path],
                shell=True),
        ])
        # Condarc verification and creation for the new environment.
        rules.extend([
            LoggingRule(
                'Verifying that only the environment condarc is utilized.'
            ),
            PythonRule(self._verify_condarc,
                       kwargs={
                           'conda_path': install_path,
                           'env': conda_env
                       }),
            LoggingRule('Creating condarc for environment.'),
            PythonRule(
                self._update_condarc,
                [install_path, condarc],
            ),
        ])
        # On update, keep already-installed packages untouched.
        if update_install:
            conda_install_cmd.append('--freeze-installed')
        if conda_packages:
            rules.extend([
                SubprocessRule(conda_install_cmd + conda_packages,
                               env=conda_env,
                               shell=True),
            ])
        # Export the resulting environment and prepare the module path.
        rules.extend([
            LoggingRule(
                'Creating environment.yml from newly built environment.'),
            PythonRule(self._export_conda_environment,
                       kwargs={
                           'conda_path': install_path,
                           'env': conda_env,
                       }),
            LoggingRule('Creating module path.'),
            PythonRule(self._makedirs, [module_path, 0o755])
        ])
        # Record this environment as installed.
        rules.append(
            PythonRule(self._update_installed_environments,
                       [config['environment_name'], config]))
    return rules
def _get_recreate_modules_rules(self):
    """Return rules that wipe and regenerate the lmod module tree."""
    refresh_cmd = self._spack_cmd + [
        'module', 'lmod', 'refresh', '-y', '--delete-tree'
    ]
    return [
        LoggingRule('Recreating modules.'),
        SubprocessRule(refresh_cmd),
    ]
def _get_package_spec_rule(self, package_config):
    """Return a rule that runs `spack spec` for one package config."""
    spec_text = self._get_spec_string(package_config)
    specs = self._get_spec_list(package_config)
    self._logger.debug(
        msg='Creating package spec rule for spec: {0}'.format(spec_text))
    return SubprocessRule(self._spack_cmd + ['spec'] + specs)
def _get_reindex_rules(self):
    """Return rules that run `spack reindex`."""
    return [
        LoggingRule('Re-indexing installed packages.'),
        SubprocessRule(self._spack_cmd + ['reindex']),
    ]
def _get_image_install_rules(self):
    """Build rules that build, stage and install singularity images.

    For every definition/tag combination: write a definition file,
    build the image into the staging area, copy image and definition
    into the install tree, update the installed-images registry and
    write an lmod modulefile.

    Returns:
        list: Rules to execute in order.
    """
    rules = []
    # Shared cache/tmp locations for every singularity build.
    default_env = {
        'SINGULARITY_CACHEDIR': self._source_cache,
        'SINGULARITY_TMPDIR': self._tmpdir
    }
    uid = os.getuid()
    # Obtain already installed images
    installed_images = self._get_installed_images()['images']
    remove_after_update = self._confreader['config']['config'].get(
        'remove_after_update', False)
    rules.extend([
        LoggingRule('Cleaning up images in staging path: %s' %
                    self._build_stage),
        PythonRule(self._clean_staging),
    ])
    for definition in self._confreader['build_config']['definitions']:
        for tag in definition.pop('tags'):
            image_config = self._get_image_config(tag, definition)
            nameformat = image_config.pop('nameformat')
            commands = image_config.pop('commands')
            install_name = '%s/%s' % (image_config['module_namespace'],
                                      image_config['module_name'])
            # Staging locations for the definition file and image.
            stage_definition_path = os.path.join(self._build_stage,
                                                 'definitions')
            stage_image_path = os.path.join(self._build_stage, 'images')
            stage_definition = os.path.join(stage_definition_path,
                                            '{0}.def'.format(nameformat))
            stage_image = os.path.join(stage_image_path,
                                       '{0}.sif'.format(nameformat))
            # Final installation locations.
            install_definition_path = os.path.join(self._install_path,
                                                   'definitions')
            install_image_path = os.path.join(self._install_path, 'images')
            install_definition = os.path.join(
                install_definition_path, os.path.basename(stage_definition))
            install_image = os.path.join(install_image_path,
                                         os.path.basename(stage_image))
            module_path = os.path.join(self._module_path,
                                       image_config['module_namespace'],
                                       image_config['name'])
            image_config['definition_file'] = install_definition
            image_config['image_file'] = install_image
            image_config['module_path'] = module_path
            build_env = copy.deepcopy(default_env)
            # Optional registry credentials injected into the build env.
            auths = self._auths.get(image_config['registry'], None)
            if auths:
                rules.append(
                    LoggingRule(
                        ("Using authentication for user "
                         "'%s' with registry '%s'") %
                        (auths['username'], image_config['registry'])))
                build_env.update({
                    'SINGULARITY_DOCKER_USERNAME': auths['username'],
                    'SINGULARITY_DOCKER_PASSWORD': auths['password']
                })
            skip_install = False
            update_install = False
            # Check if same kind of an image is already installed
            installed_checksums = [
                installed_image['checksum']
                for installed_image in installed_images.values()
            ]
            previous_image_path = installed_images.get(
                install_name, {}).get('image_file', None)
            if image_config['checksum'] in installed_checksums:
                install_msg = ("Image {0} is already installed. "
                               "Skipping installation.")
                skip_install = True
            elif previous_image_path:
                install_msg = ("Image {0} installed "
                               "but marked for update.")
                update_install = True
            else:
                install_msg = ("Image {0} is "
                               "not installed. Starting installation.")
            rules.append(LoggingRule(install_msg.format(install_name)))
            if not skip_install:
                # Create all staging / install / module directories.
                rules.extend([
                    PythonRule(makedirs, [stage_definition_path]),
                    PythonRule(makedirs, [stage_image_path]),
                    PythonRule(makedirs, [install_definition_path]),
                    PythonRule(makedirs, [install_image_path]),
                    PythonRule(makedirs, [module_path]),
                ])
                rules.extend([
                    LoggingRule('Writing definition file for %s' %
                                install_name),
                    PythonRule(self._write_definition_file,
                               args=[stage_definition],
                               kwargs={
                                   'registry': image_config['registry'],
                                   'docker_url':
                                   image_config['docker_url'],
                                   'commands': commands
                               }),
                ])
                # Global build flags, overridable per image config.
                debug = self._confreader['config']['config'].get(
                    'debug', False)
                sudo = self._confreader['config']['config'].get(
                    'sudo', False)
                fakeroot = self._confreader['config']['config'].get(
                    'fakeroot', False)
                if 'debug' in image_config:
                    debug = image_config['debug']
                if 'sudo' in image_config:
                    sudo = image_config['sudo']
                if 'fakeroot' in image_config:
                    fakeroot = image_config['fakeroot']
                # hide_kwargs keeps the credential-bearing build_env
                # out of the rule's log output.
                rules.extend([
                    LoggingRule('Building image for %s' % install_name),
                    PythonRule(self._build_image,
                               [stage_image, stage_definition], {
                                   'debug': debug,
                                   'sudo': sudo,
                                   'fakeroot': fakeroot,
                                   'build_env': build_env
                               },
                               hide_kwargs=True)
                ])
                if sudo:
                    # sudo builds leave root-owned files; reclaim the
                    # staged image for the current user.
                    chown_cmd = ['sudo', 'chown', '{0}:{0}'.format(uid)]
                    rules.append(
                        SubprocessRule(chown_cmd + [stage_image],
                                       shell=True))
                rules.extend([
                    LoggingRule(
                        'Copying staged image to installation directory'),
                    PythonRule(copy_file, [stage_image, install_image]),
                ])
                rules.extend([
                    LoggingRule(
                        'Copying definition file to installation directory'
                    ),
                    PythonRule(copy_file,
                               [stage_definition, install_definition]),
                ])
                rules.extend([
                    LoggingRule('Updating installed images'),
                    PythonRule(self._update_installed_images,
                               [install_name, image_config])
                ])
                # Optionally drop the superseded image after an update.
                if update_install and remove_after_update:
                    rules.extend([
                        LoggingRule(('Removing old image from '
                                     '{0}').format(previous_image_path)),
                        PythonRule(os.remove, [previous_image_path])
                    ])
                rules.extend([
                    LoggingRule('Writing modulefile for %s' %
                                install_name),
                    PythonRule(self._write_modulefile, [
                        image_config['name'], image_config['tag'],
                        image_config['flags'], install_image, module_path
                    ]),
                ])
    return rules
def _copy_ssh(self):
    """Copies or creates ssh keys based on configuration"""
    rules = []
    # The master is treated as an extra worker so it receives the same
    # ssh configuration as the build workers.
    workers = [{'name': 'master', 'builds': {}}]
    workers.extend(self._confreader['build_config']['target_workers'])
    auth_ssh_conf = self._confreader['build_config'].get('auths', {}).get(
        'ssh', {})
    ssh_config_src = auth_ssh_conf.get('config_file', None)
    known_hosts_src = auth_ssh_conf.get('known_hosts_file', None)
    private_keys = auth_ssh_conf.get('private_keys', [])
    public_keys = auth_ssh_conf.get('public_keys', [])
    for worker in workers:
        ssh_folder = os.path.join(self._mountpoints['home']['path'],
                                  worker['name'], '.ssh')
        rules.append(
            LoggingRule(('Copying ssh settings '
                         'to home folder of %s') % worker['name']))
        # Optional ssh config (world-readable).
        if ssh_config_src:
            ssh_config_target = os.path.join(ssh_folder, 'config')
            rules.extend([
                PythonRule(copy_file,
                           args=[ssh_config_src, ssh_config_target],
                           kwargs={'chmod': 0o644})
            ])
        # Optional known_hosts (owner-only).
        if known_hosts_src:
            known_hosts_target = os.path.join(ssh_folder, 'known_hosts')
            rules.extend([
                PythonRule(copy_file,
                           args=[known_hosts_src, known_hosts_target],
                           kwargs={'chmod': 0o600})
            ])
        # Public keys are copied world-readable.
        if public_keys:
            for public_key_src in public_keys:
                public_key_target = os.path.join(
                    ssh_folder, os.path.basename(public_key_src))
                rules.extend([
                    PythonRule(copy_file,
                               args=[public_key_src, public_key_target],
                               kwargs={'chmod': 0o644})
                ])
        # Private keys are copied owner-only; if none are configured,
        # generate a passphrase-less RSA key pair instead.
        if private_keys:
            for private_key_src in private_keys:
                private_key_target = os.path.join(
                    ssh_folder, os.path.basename(private_key_src))
                rules.extend([
                    PythonRule(copy_file,
                               args=[private_key_src, private_key_target],
                               kwargs={'chmod': 0o600})
                ])
        else:
            private_key_target = os.path.join(ssh_folder, 'id_rsa_autogen')
            if os.path.isfile(private_key_target):
                rules.append(
                    LoggingRule(('Autogenerated ssh key '
                                 '{0} exists. Skipping key '
                                 'generation.').format(private_key_target),
                                stdout_writer=self._logger.warning))
            else:
                rules.extend([
                    LoggingRule('No private keys given, generating them.',
                                stdout_writer=self._logger.warning),
                    SubprocessRule([
                        'ssh-keygen', '-t', 'rsa', '-b', '4096', '-N',
                        '""', '-q', '-f', private_key_target
                    ],
                                   shell=True)
                ])
            # Adding newly generated keys to key lists so that they
            # will be cloned to other workers
            # NOTE(review): mutating these lists makes later worker
            # iterations take the copy branch using the first worker's
            # key paths as source — appears intentional; confirm.
            private_keys.append(private_key_target)
            public_keys.append('%s.pub' % private_key_target)
    return rules
def _get_environment_install_rules(self):
    """ This function returns build rules that install Anaconda environments.

    Returns:
        list: List of build rules that install Anaconda environments.
    """

    rules = []

    # Obtain already installed environments
    installed_environments = self._get_installed_environments(
    )['environments']

    # Only use system paths during installations
    env_path = list(
        filter(lambda x: re.search('^/(usr|bin|sbin)', x),
               os.getenv('PATH').split(':')))

    for environment in self._confreader['build_config']['environments']:
        environment_config = self._create_environment_config(environment)
        environment_name = environment_config['environment_name']
        pip_packages = environment_config.get('pip_packages', [])
        conda_packages = environment_config.get('conda_packages', [])
        condarc = environment_config.get('condarc', {})
        condarc_install = environment_config.get('condarc_install', {})
        condarc_postinstall = environment_config.get(
            'condarc_postinstall', {})
        extra_module_variables = environment_config.get(
            'extra_module_variables', {})
        conda_install_cmd = [
            environment_config['conda_cmd'], 'install', '--yes', '-n',
            'base'
        ]
        pip_install_cmd = [
            'pip', 'install', '--cache-dir', self._pip_cache
        ]
        skip_install = False
        update_install = False
        # A frozen environment is never updated even when its checksum
        # differs from the recorded one.
        freeze = environment_config.get('freeze', False)
        install_path = self._get_install_path(environment_config)
        module_path = self._get_module_path(environment_config)

        # Check if same kind of an environment is already installed
        installed_checksum = installed_environments.get(
            environment_name, {}).get('checksum', '')
        if not installed_checksum:
            install_msg = ("Environment {environment_name} "
                           "not installed. Starting installation.")
        elif installed_checksum != environment_config[
                'checksum'] and not freeze:
            # Remember the previous installation so its package list can
            # be replayed and its files removed after the update.
            previous_environment = installed_environments[
                environment_name]['environment_file']
            previous_install_path = installed_environments[
                environment_name]['install_path']
            install_msg = ("Environment {environment_name} installed "
                           "but marked for update.")
            update_install = True
        else:
            install_msg = (
                "Environment {environment_name} is already installed. "
                "Skipping installation.")
            # Reuse the recorded paths of the existing installation.
            install_path = installed_environments[environment_name][
                'install_path']
            module_path = installed_environments[environment_name][
                'module_path']
            skip_install = True

        installer = self._get_installer_path(
            environment_config, update_installer=update_install)

        # Add new installation path to PATH
        conda_env = {
            'PATH':
            ':'.join([os.path.join(install_path, 'bin')] + env_path),
            'PYTHONUNBUFFERED': '1',
        }

        environment_config['install_path'] = install_path
        environment_config['module_path'] = module_path
        environment_config[
            'environment_file'] = self._get_environment_file_path(
                install_path)

        rules.append(LoggingRule(install_msg.format(**environment_config)))

        if not skip_install:
            # Install base environment
            rules.extend([
                PythonRule(self._remove_environment, [install_path]),
                PythonRule(
                    self._download_installer,
                    [installer, environment_config['installer_version']]),
                PythonRule(
                    makedirs,
                    [install_path, 0o755],
                ),
                SubprocessRule(
                    ['bash', installer, '-f', '-b', '-p', install_path],
                    shell=True),
            ])
            rules.extend([
                # Verify no external condarc is used
                LoggingRule(
                    'Verifying that only the environment condarc is utilized.'
                ),
                PythonRule(self._verify_condarc, [install_path]),
                # Install mamba if needed
                LoggingRule('Installing mamba & conda-pack if needed.'),
                PythonRule(
                    self._install_package_tools,
                    [
                        install_path,
                        environment_config['mamba'],
                        environment_config.get('conda_pack', False),
                        conda_env,
                    ],
                ),
                # Create condarc for the installed environment
                LoggingRule('Creating condarc for environment.'),
                PythonRule(
                    self._update_condarc,
                    [
                        install_path, condarc, condarc_install,
                        condarc_postinstall
                    ],
                ),
            ])

            # During update, install old packages using environment.yml
            if update_install:
                rules.extend([
                    LoggingRule((
                        'Sanitizing environment file from previous installation '
                        '"{0}" to new installation "{1}"').format(
                            previous_environment,
                            environment_config['environment_file'])),
                    PythonRule(
                        self._sanitize_environment_file,
                        [
                            previous_environment,
                            environment_config['environment_file']
                        ],
                    ),
                    LoggingRule(('Installing conda packages from previous '
                                 'installation.')),
                    SubprocessRule([
                        environment_config['conda_cmd'], 'env', 'update',
                        '--file', environment_config['environment_file'],
                        '--prefix', install_path
                    ],
                                   env=conda_env,
                                   shell=True)
                ])
                # Keep previously installed packages untouched on update.
                conda_install_cmd.append('--freeze-installed')
                pip_install_cmd.extend(
                    ['--upgrade', '--upgrade-strategy', 'only-if-needed'])

            # Install packages using conda
            if conda_packages:
                rules.extend([
                    LoggingRule('Installing conda packages.'),
                    SubprocessRule(conda_install_cmd + conda_packages,
                                   env=conda_env,
                                   shell=True),
                ])

            # Install packages using pip
            if pip_packages:
                rules.extend([
                    LoggingRule('Installing pip packages.'),
                    SubprocessRule(pip_install_cmd + pip_packages,
                                   env=conda_env,
                                   shell=True),
                ])

            # Create environment.yml
            rules.extend([
                LoggingRule(
                    'Creating environment.yml from newly built environment.'
                ),
                PythonRule(self._export_conda_environment, [install_path])
            ])

            # Add newly created environment to installed environments
            rules.extend([
                LoggingRule('Updating installed_environments.yml.'),
                PythonRule(self._update_installed_environments, [
                    environment_config['environment_name'],
                    environment_config
                ]),
            ])
            if update_install and self.remove_after_update:
                rules.extend([
                    LoggingRule(('Removing old environment from '
                                 '{0}').format(previous_install_path)),
                    PythonRule(self._remove_environment,
                               [previous_install_path])
                ])

        # Update .condarc
        rules.extend([
            LoggingRule('Creating condarc for environment: %s' %
                        environment_name),
            PythonRule(self._update_condarc, [
                install_path, condarc, condarc_install, condarc_postinstall
            ], {'install_time': False})
        ])

        # Pack the environment
        if environment_config.get('conda_pack', False):
            rules.extend([
                LoggingRule('Creating conda-pack from the environment.'),
                PythonRule(self._conda_pack_environment, [
                    install_path,
                    self._conda_pack_path,
                    environment_config['name'],
                    environment_config['version'],
                    environment_config['checksum_small'],
                    conda_env,
                ])
            ])

        # Create modulefile for the environment
        rules.extend([
            LoggingRule('Creating modulefile for environment: %s' %
                        environment_name),
            PythonRule(self._write_modulefile, [
                environment_config['name'], environment_config['version'],
                install_path, module_path, extra_module_variables
            ])
        ])

    return rules