def _get_rules(self):
    """Return rules that print the keys of the 'book' and 'test' configurations."""
    sections = ('book', 'test')
    return [
        PythonRule(print_keys, [self._confreader[section]], {})
        for section in sections
    ]
def _get_directory_creation_rules(self):
    """Return rules that create the cache, stage, install and module directories.

    Each directory gets a log message followed by a 0o755 makedirs rule.
    """
    directories = (
        ('cache', self._source_cache),
        ('stage', self._build_stage),
        ('installation', self._install_path),
        ('module', self._module_path),
    )
    rules = []
    for label, path in directories:
        rules.append(LoggingRule('Creating %s directory: %s' % (label, path)))
        rules.append(PythonRule(self._makedirs, [path, 0o755]))
    return rules
def _get_directory_creation_rules(self):
    """Creates directories for nfs"""
    rules = [LoggingRule('Creating build directories')]
    # The master also gets a home directory, so prepend it to the workers.
    workers = [{'name': 'master', 'builds': {}}]
    workers.extend(self._confreader['build_config']['target_workers'])
    for worker in workers:
        worker_home_folder = os.path.join(self._nfs_folder, 'buildbot_home',
                                          worker['name'])
        worker_ssh_folder = os.path.join(worker_home_folder, '.ssh')
        rules.extend([
            LoggingRule(('Creating nfs home directory '
                         'for worker %s') % worker['name']),
            # .ssh must be private to its owner (0o700).
            PythonRule(self._makedirs,
                       args=[worker_ssh_folder],
                       kwargs={'chmod': 0o700}),
            LoggingRule('Creating .bashrc'),
            # .bashrc is rendered from the bashrc.j2 template.
            PythonRule(self._write_template,
                       args=[
                           os.path.join(worker_home_folder, '.bashrc'),
                           os.path.join(self._templates_folder, 'bashrc.j2'),
                       ]),
            LoggingRule(('Creating build and software '
                         'directories for worker %s') % worker['name'])
        ])
        # One builds/ and one software/ directory per enabled builder.
        for builder_name, builder_opts in self._confreader['build_config'][
                'builds'].items():
            if builder_opts.get('enabled', False):
                rules.extend([
                    PythonRule(
                        self._makedirs,
                        args=[
                            os.path.join(self._nfs_folder, 'builds',
                                         worker['name'], builder_name),
                        ],
                    ),
                    PythonRule(
                        self._makedirs,
                        args=[
                            os.path.join(self._nfs_folder, 'software',
                                         worker['name'], builder_name),
                        ],
                    ),
                ])
    return rules
def _create_singularity_auths(self):
    """Creates authentications for docker registries for Singularity builder"""
    workers = [{'name': 'master', 'builds': {}}]
    workers.extend(self._confreader['build_config']['target_workers'])
    auth_conf = self._confreader['build_config'].get('auths', {})
    singularity_auths = {'auths': auth_conf.get('singularity', {})}
    rules = []
    # Write the same auths file into every worker's home folder.
    for worker in workers:
        auths_path = os.path.join(self._mountpoints['home']['path'],
                                  worker['name'], 'singularity_auths.yaml')
        rules.append(PythonRule(write_yaml, [auths_path, singularity_auths]))
    return rules
def _get_rules(self):
    """Return a single rule that prints the keys of the 'book' configuration."""
    return [PythonRule(print_keys, [self._confreader['book']], {})]
def _get_rules(self):
    """Return two python rules with a failing subprocess rule between them."""
    rule_before = PythonRule(self.good_function, [], {})
    failing_rule = SubprocessRule(['false'])
    rule_after = PythonRule(self.good_function2, [], {})
    return [rule_before, failing_rule, rule_after]
def _get_compiler_install_rules(self):
    """Return rules that register, install and configure Spack compilers."""
    rules = []
    self._logger.debug(msg='Parsing rules for compilers:')
    compiler_packages = self._confreader['build_config']['compilers']
    rules.append(LoggingRule('Removing old compilers.yml'))
    rules.append(PythonRule(self._remove_compilers_file))

    def get_compiler_find_rule(spec_list):
        # Pipe `spack find -p <spec>` through tail/awk to extract the
        # install prefix and feed it (via xargs -r) to `spack compiler add`.
        # shell=True makes the '|' tokens act as real pipes; check=False
        # because a not-yet-installed compiler is not an error here.
        return SubprocessRule(
            (self._spack_cmd + ['find', '-p'] + spec_list + ([
                '|', 'tail', '-n', '1', '|', 'awk', "'{", 'print', '$2',
                "}'", '|', 'xargs', '-r'
            ]) + self._spack_cmd + ['compiler', 'add']),
            shell=True,
            check=False)

    def get_compiler_flags_rule(spec_list, package_config):
        # Apply the configured flags to the first spec in the list.
        flags = package_config.get('flags', {})
        return PythonRule(self._set_compiler_flags, [spec_list[0], flags])

    rules.extend([
        LoggingRule('Adding default compilers.'),
        SubprocessRule(self._spack_cmd + ['compiler', 'add'], )
    ])

    # First pass: register compilers that are already installed.
    rules.append(LoggingRule('Adding existing compilers.'))
    for package_config in compiler_packages:
        spec_str = self._get_spec_string(package_config)
        spec_list = self._get_spec_list(package_config)
        self._logger.debug(
            msg='Creating compiler find rule for spec: {0}'.format(spec_str))
        rules.extend([
            get_compiler_find_rule(spec_list),
            get_compiler_flags_rule(spec_list, package_config)
        ])

    # Second pass: install missing compilers, then re-register them.
    # System compilers are assumed to be present already and are skipped.
    rules.append(LoggingRule('Installing compilers.'))
    for package_config in compiler_packages:
        spec_list = self._get_spec_list(package_config)
        if not package_config.get('system_compiler', False):
            rules.extend([
                self._get_package_install_rule(package_config),
                get_compiler_find_rule(spec_list),
                get_compiler_flags_rule(spec_list, package_config)
            ])
    rules.append(PythonRule(self._show_compilers))
    return rules
def _copy_certs(self):
    """Copy configured TLS certs, or generate a self-signed pair if absent.

    The key/cert always end up in <build_folder>/certs with modes
    0o600/0o644 respectively.
    """
    master_conf = self._confreader['build_config']['buildbot_master']
    fqdn = self._confreader['build_config']['fqdn']
    private_key = master_conf.get('private_key', None)
    public_cert = master_conf.get('public_cert', None)
    key = os.path.join(self._build_folder, 'certs', 'buildbot.key')
    cert = os.path.join(self._build_folder, 'certs', 'buildbot.crt')
    rules = []
    if private_key and public_cert:
        rules.append(LoggingRule('Copying certs'))
        rules.append(PythonRule(copy_file, args=[master_conf['private_key'], key]))
        rules.append(PythonRule(copy_file, args=[master_conf['public_cert'], cert]))
    else:
        rules.append(
            LoggingRule('Creating self signed certs', self._logger.warning))
        rules.append(
            SubprocessRule([
                'openssl', 'req', '-x509', '-nodes', '-new', '-keyout', key,
                '-out', cert, '-days', '365', '-subj', '/CN=%s' % fqdn
            ],
                           stderr_writer=self._logger.warning))
    rules.append(LoggingRule('Setting cert modes'))
    rules.append(PythonRule(os.chmod, args=[key, 0o600]))
    rules.append(PythonRule(os.chmod, args=[cert, 0o644]))
    return rules
def _get_rules(self):
    """Return one python rule and one subprocess rule."""
    rules = []
    rules.append(PythonRule(example_function, [0, 0], {}))
    rules.append(SubprocessRule(['echo', 'test']))
    return rules
def _get_home_creation_rules(worker):
    """Return rules that create the nfs home directory for *worker*.

    Bug fix: the parameter was previously named ``workers`` while the body
    referenced ``worker``, so the passed argument was silently ignored and
    ``worker`` resolved from the enclosing scope instead. The parameter is
    now named ``worker`` and used directly (positional callers unaffected).
    """
    worker_home_folder = os.path.join(
        self._mountpoints['home']['path'], worker['name'])
    worker_ssh_folder = os.path.join(worker_home_folder, '.ssh')
    home_creation_rules = [
        LoggingRule(('Creating nfs home directory '
                     'for worker %s') % worker['name']),
        # .ssh must be private to its owner.
        PythonRule(makedirs,
                   args=[worker_ssh_folder],
                   kwargs={'chmod': 0o700}),
        LoggingRule('Creating .bashrc'),
        # .bashrc is rendered from the bashrc.j2 template.
        PythonRule(self._template_config,
                   args=[
                       os.path.join(worker_home_folder, '.bashrc'),
                       os.path.join(self._templates_folder, 'bashrc.j2'),
                   ]),
    ]
    return home_creation_rules
def _get_directory_creation_rules(self):
    """Return rules creating cache, temporary, install and module directories."""
    rules = []
    directory_specs = [
        ('Creating installer cache directory: %s' % self._installer_cache,
         [self._installer_cache, 0o755]),
        ('Creating package cache directory: %s' % self._pkg_cache,
         [self._pkg_cache, 0o755]),
        # NB: the temporary directory is created without an explicit mode.
        ('Creating temporary directory: %s' % self._tmpdir, [self._tmpdir]),
        ('Creating installation directory: %s' % self._install_path,
         [self._install_path, 0o755]),
        ('Creating module directory: %s' % self._module_path,
         [self._module_path, 0o755]),
    ]
    for message, makedirs_args in directory_specs:
        rules.append(LoggingRule(message))
        rules.append(PythonRule(self._makedirs, makedirs_args))
    return rules
def _get_copy_ci_directory_rule(self):
    """Copies the template ci directory to build destination"""
    # Guard clause: nothing to do when the build folder already exists.
    if os.path.isdir(self._build_folder):
        return []
    src = os.path.join(os.getcwd(), 'buildrules', 'ci')
    dest = self._build_folder
    return [
        LoggingRule('Copying CI directory from %s to %s' % (src, dest)),
        PythonRule(copy_dir, args=[src, dest]),
    ]
def _get_directory_creation_rules(self):
    """
    This function returns builds rules that create required directories.

    Returns:
        list: List of build rules.
    """
    directory_specs = [
        ('Creating installer cache directory: %s' % self._installer_cache,
         [self._installer_cache, 0o755]),
        ('Creating package cache directory: %s' % self._pkg_cache,
         [self._pkg_cache, 0o755]),
        # NB: the temporary directory is created without an explicit mode.
        ('Creating temporary directory: %s' % self._tmpdir, [self._tmpdir]),
        ('Creating installation directory: %s' % self._install_path,
         [self._install_path, 0o755]),
        ('Creating module directory: %s' % self._module_path,
         [self._module_path, 0o755]),
        ('Creating conda-pack directory: %s' % self._conda_pack_path,
         [self._conda_pack_path, 0o755]),
    ]
    rules = []
    for message, makedirs_args in directory_specs:
        rules.append(LoggingRule(message))
        rules.append(PythonRule(makedirs, makedirs_args))
    return rules
def _get_license_copy_rules(self):
    """Return rules that copy license files for packages that define them."""
    self._logger.debug(msg='Copying license files:')
    all_packages = (self._confreader['build_config']['packages'] +
                    self._confreader['build_config']['compilers'])
    # Only packages with a 'licenses' entry get a copy rule.
    return [
        PythonRule(self._copy_license_rule, [package_config])
        for package_config in all_packages
        if 'licenses' in package_config
    ]
def _get_flatten_lmod_rules(self):
    """This function will create rules that generate a flat lmod structure
    from hierarchical modulefiles"""
    lmod_root = self._confreader['config']['config']['module_roots']['lmod']
    removal_rules = [
        LoggingRule(
            'Removing folders that contain the non-hierarchal module structure.'
        ),
        PythonRule(self._remove_all_modules_folders, args=[lmod_root]),
    ]
    copy_rules = [
        LoggingRule('Copying modules to non-hierarchal module structure.'),
        PythonRule(self._copy_all_modules, args=[lmod_root]),
    ]
    return removal_rules + copy_rules
def _get_config_creation_rules(self):
    """Return rules that render each service configuration from its template."""
    # (log message, target path, template file name) for every config file.
    config_targets = [
        ('Creating buildbot_master.cfg',
         os.path.join(self._conf_folder, 'buildbot', 'buildbot_master.cfg'),
         'buildbot_master.cfg.j2'),
        ('Creating docker-compose.yml',
         os.path.join(self._build_folder, 'docker-compose.yml'),
         'docker-compose.yml.j2'),
        ('Creating nginx.conf',
         os.path.join(self._conf_folder, 'nginx', 'nginx.conf'),
         'nginx.conf.j2'),
        ('Creating exports.txt',
         os.path.join(self._conf_folder, 'nfs', 'exports.txt'),
         'exports.txt.j2'),
    ]
    rules = []
    for message, target, template_name in config_targets:
        rules.append(LoggingRule(message))
        rules.append(
            PythonRule(
                self._template_config,
                args=[
                    target,
                    os.path.join(self._templates_folder, template_name),
                ],
            ))
    return rules
def _get_clean_build_directory_rules(self):
    """Cleans the build directory from unnecessary files after building"""
    templates_dir = os.path.join(self._build_folder, 'templates')
    rules = [LoggingRule('Cleaning build directory')]
    rules.append(LoggingRule('Removing templates from build directory'))
    # ignore_errors: the templates directory may already be absent.
    rules.append(
        PythonRule(rmtree,
                   args=[templates_dir],
                   kwargs={'ignore_errors': True}))
    return rules
def test_python_rule_dry_run(self, capture):
    """A dry run should log the rule description without executing it."""
    # dry_run=True: example_function itself must not run.
    pr = PythonRule(
        example_function, [], {},
        stdout_writer=logging.info,
        stderr_writer=logging.warning)(dry_run=True)
    # Exactly one "Running PythonRule" banner is expected in the captured log.
    capture.check(
        (
            'PythonRule',
            'INFO',
            'Running PythonRule: { function: example_function, args: [], kwargs: {} }'
        ),
    )
def _get_modulefile_clean_rules(self):
    """
    This function creates build rules that clean up modulefiles.

    Returns:
        list: List of build rules.
    """
    return [
        LoggingRule("Cleaning previous modulefiles."),
        PythonRule(self._clean_modules),
    ]
def _get_image_install_rules(self):
    """Return rules that install singularity images that are not yet present.

    Bug fix: the log message used doubled braces (``{{name}}``), which
    ``str.format`` renders as the literal text ``{name}`` instead of the
    image name; single braces now format correctly.
    """
    rules = []
    installed_images = self._get_installed_images()
    # Restrict inherited PATH entries to system locations only.
    env_path = list(
        filter(lambda x: re.search('^/usr', x),
               os.getenv('PATH').split(':')))
    for definition in self._confreader['build_config']['definitions']:
        config = self._create_image_config(definition)
        installer = self._get_installer_path(config)
        stage_path = self._get_stage_path(config)
        # NOTE(review): install_path is computed but not used below —
        # presumably consumed by a later step; kept to preserve behavior.
        install_path = self._get_install_path(config)
        if config['name'] not in installed_images:
            # Put the staged installation's bin first in PATH.
            env_path_image = {
                'PATH': ':'.join([os.path.join(stage_path, 'bin')] + env_path)
            }
            rules.extend([
                LoggingRule((
                    "Image {name} not found.\n"
                    "Installing singularity image '{name}' with "
                    "module '{image_name}'").format(**config)),
                PythonRule(
                    self._prepare_installation_paths,
                    [config['module_name'], config['module_version']]),
                SubprocessRule(['bash', installer, '-b', '-p', stage_path],
                               shell=True),
            ])
            rules.extend([
                SubprocessRule(['singularity', 'list'],
                               env=env_path_image,
                               shell=True)
            ])
            # TODO: record the installation, e.g.
            # PythonRule(self._update_installed_images,
            #            [config['name'], config])
    return rules
def _create_swift_auths(self):
    """Creates authentications for OpenStack deployer"""
    workers = [{'name': 'master', 'builds': {}}]
    workers.extend(self._confreader['build_config']['target_workers'])
    auth_conf = self._confreader['build_config'].get('auths', {})
    swift_auths = {'auths': auth_conf.get('swift', {})}
    rules = []
    # Write the same auths file into every worker's home folder.
    for worker in workers:
        auths_path = os.path.join(self._mountpoints['home']['path'],
                                  worker['name'], 'os_auths.yaml')
        rules.append(PythonRule(write_yaml, [auths_path, swift_auths]))
    return rules
def _get_image_install_rules(self):
    """Return rules that build, install and module-ify every singularity
    image tag defined in the build configuration."""
    rules = []
    # Keep singularity caches inside the builder's own directories.
    default_env = {
        'SINGULARITY_CACHEDIR': self._source_cache,
        'SINGULARITY_TMPDIR': self._tmpdir
    }
    uid = os.getuid()
    # Obtain already installed images
    installed_images = self._get_installed_images()['images']
    remove_after_update = self._confreader['config']['config'].get(
        'remove_after_update', False)
    rules.extend([
        LoggingRule('Cleaning up images in staging path: %s' %
                    self._build_stage),
        PythonRule(self._clean_staging),
    ])
    for definition in self._confreader['build_config']['definitions']:
        for tag in definition.pop('tags'):
            image_config = self._get_image_config(tag, definition)
            nameformat = image_config.pop('nameformat')
            commands = image_config.pop('commands')
            install_name = '%s/%s' % (image_config['module_namespace'],
                                      image_config['module_name'])
            # Staged artifacts live under <build_stage>/{definitions,images};
            # final artifacts under <install_path>/{definitions,images}.
            stage_definition_path = os.path.join(self._build_stage,
                                                 'definitions')
            stage_image_path = os.path.join(self._build_stage, 'images')
            stage_definition = os.path.join(stage_definition_path,
                                            '{0}.def'.format(nameformat))
            stage_image = os.path.join(stage_image_path,
                                       '{0}.sif'.format(nameformat))
            install_definition_path = os.path.join(self._install_path,
                                                   'definitions')
            install_image_path = os.path.join(self._install_path, 'images')
            install_definition = os.path.join(
                install_definition_path, os.path.basename(stage_definition))
            install_image = os.path.join(install_image_path,
                                         os.path.basename(stage_image))
            module_path = os.path.join(self._module_path,
                                       image_config['module_namespace'],
                                       image_config['name'])
            image_config['definition_file'] = install_definition
            image_config['image_file'] = install_image
            image_config['module_path'] = module_path
            build_env = copy.deepcopy(default_env)
            # Registry credentials, if configured, are passed via env vars.
            auths = self._auths.get(image_config['registry'], None)
            if auths:
                rules.append(
                    LoggingRule(
                        ("Using authentication for user "
                         "'%s' with registry '%s'") %
                        (auths['username'], image_config['registry'])))
                build_env.update({
                    'SINGULARITY_DOCKER_USERNAME': auths['username'],
                    'SINGULARITY_DOCKER_PASSWORD': auths['password']
                })
            skip_install = False
            update_install = False
            # Check if same kind of an image is already installed
            installed_checksums = [
                installed_image['checksum']
                for installed_image in installed_images.values()
            ]
            previous_image_path = installed_images.get(
                install_name, {}).get('image_file', None)
            # Same checksum anywhere -> skip; same name but different
            # checksum -> update; otherwise fresh install.
            if image_config['checksum'] in installed_checksums:
                install_msg = ("Image {0} is already installed. "
                               "Skipping installation.")
                skip_install = True
            elif previous_image_path:
                install_msg = ("Image {0} installed "
                               "but marked for update.")
                update_install = True
            else:
                install_msg = ("Image {0} is "
                               "not installed. Starting installation.")
            rules.append(LoggingRule(install_msg.format(install_name)))
            if not skip_install:
                rules.extend([
                    PythonRule(makedirs, [stage_definition_path]),
                    PythonRule(makedirs, [stage_image_path]),
                    PythonRule(makedirs, [install_definition_path]),
                    PythonRule(makedirs, [install_image_path]),
                    PythonRule(makedirs, [module_path]),
                ])
                rules.extend([
                    LoggingRule('Writing definition file for %s' %
                                install_name),
                    PythonRule(self._write_definition_file,
                               args=[stage_definition],
                               kwargs={
                                   'registry': image_config['registry'],
                                   'docker_url': image_config['docker_url'],
                                   'commands': commands
                               }),
                ])
                # Per-image settings override the global configuration.
                debug = self._confreader['config']['config'].get(
                    'debug', False)
                sudo = self._confreader['config']['config'].get(
                    'sudo', False)
                fakeroot = self._confreader['config']['config'].get(
                    'fakeroot', False)
                if 'debug' in image_config:
                    debug = image_config['debug']
                if 'sudo' in image_config:
                    sudo = image_config['sudo']
                if 'fakeroot' in image_config:
                    fakeroot = image_config['fakeroot']
                rules.extend([
                    LoggingRule('Building image for %s' % install_name),
                    # hide_kwargs=True keeps the registry credentials in
                    # build_env out of the logs.
                    PythonRule(self._build_image,
                               [stage_image, stage_definition], {
                                   'debug': debug,
                                   'sudo': sudo,
                                   'fakeroot': fakeroot,
                                   'build_env': build_env
                               },
                               hide_kwargs=True)
                ])
                if sudo:
                    # Image was built as root: give it back to current user.
                    chown_cmd = ['sudo', 'chown', '{0}:{0}'.format(uid)]
                    rules.append(
                        SubprocessRule(chown_cmd + [stage_image],
                                       shell=True))
                rules.extend([
                    LoggingRule(
                        'Copying staged image to installation directory'),
                    PythonRule(copy_file, [stage_image, install_image]),
                ])
                rules.extend([
                    LoggingRule(
                        'Copying definition file to installation directory'
                    ),
                    PythonRule(copy_file,
                               [stage_definition, install_definition]),
                ])
                rules.extend([
                    LoggingRule('Updating installed images'),
                    PythonRule(self._update_installed_images,
                               [install_name, image_config])
                ])
                if update_install and remove_after_update:
                    rules.extend([
                        LoggingRule(('Removing old image from '
                                     '{0}').format(previous_image_path)),
                        PythonRule(os.remove, [previous_image_path])
                    ])
            # NOTE(review): modulefile is (re)written even when the
            # installation itself is skipped — confirm this is intended.
            rules.extend([
                LoggingRule('Writing modulefile for %s' % install_name),
                PythonRule(self._write_modulefile, [
                    image_config['name'], image_config['tag'],
                    image_config['flags'], install_image, module_path
                ]),
            ])
    return rules
def _get_environment_install_rules(self):
    """Return rules that install conda environments from the build config."""
    rules = []
    installed_environments = self._get_installed_environments(
    )['environments']
    # Only use system paths (/usr...) during installations.
    env_path = list(
        filter(lambda x: re.search('^/usr', x),
               os.getenv('PATH').split(':')))
    for environment in self._confreader['build_config']['environments']:
        config = self._create_environment_config(environment)
        environment_name = config['environment_name']
        # NOTE(review): pip_packages is popped but never used in this
        # function — confirm whether pip installation is missing here.
        pip_packages = config.pop('pip_packages', [])
        conda_packages = config.pop('conda_packages', [])
        condarc = config.pop('condarc', {})
        installer = self._get_installer_path(config)
        install_path = self._get_install_path(config)
        module_path = self._get_module_path(config)
        # Put the environment's own bin first in PATH.
        conda_env = {
            'PATH': ':'.join([os.path.join(install_path, 'bin')] + env_path)
        }
        conda_install_cmd = ['conda', 'install', '--yes', '-n', 'base']
        skip_install = False
        update_install = False
        # Decide between fresh install / update / skip via stored checksum.
        installed_checksum = installed_environments.get(
            environment_name, {}).get('checksum', '')
        if not installed_checksum:
            install_msg = ("Environment {environment_name} "
                           "not installed. Starting installation.")
        elif installed_checksum != config['checksum']:
            install_msg = ("Environment {environment_name} installed "
                           "but marked for update.")
            update_install = True
        else:
            install_msg = (
                "Environment {environment_name} is already installed. "
                "Skipping installation.")
            skip_install = True
        rules.append(LoggingRule(install_msg.format(**config)))
        if skip_install:
            continue
        # Base environment installation (download + clean + run installer).
        rules.extend([
            PythonRule(self._download_installer, [config]),
            PythonRule(self._clean_failed, [install_path]),
            PythonRule(
                self._makedirs,
                [install_path, 0o755],
            ),
            SubprocessRule(
                ['bash', installer, '-f', '-b', '-p', install_path],
                shell=True),
        ])
        rules.extend([
            LoggingRule(
                'Verifying that only the environment condarc is utilized.'
            ),
            PythonRule(self._verify_condarc,
                       kwargs={
                           'conda_path': install_path,
                           'env': conda_env
                       }),
            LoggingRule('Creating condarc for environment.'),
            PythonRule(
                self._update_condarc,
                [install_path, condarc],
            ),
        ])
        if update_install:
            # Do not replace already installed packages on update.
            conda_install_cmd.append('--freeze-installed')
        if conda_packages:
            rules.extend([
                SubprocessRule(conda_install_cmd + conda_packages,
                               env=conda_env,
                               shell=True),
            ])
        rules.extend([
            LoggingRule(
                'Creating environment.yml from newly built environment.'),
            PythonRule(self._export_conda_environment,
                       kwargs={
                           'conda_path': install_path,
                           'env': conda_env,
                       }),
            LoggingRule('Creating module path.'),
            PythonRule(self._makedirs, [module_path, 0o755])
        ])
        rules.append(
            PythonRule(self._update_installed_environments,
                       [config['environment_name'], config]))
    return rules
def get_rules(self):
    """Return rules that deploy the built software with the swift deployer."""
    return [
        LoggingRule('Deploying software with swift deployer:'),
        PythonRule(self._swift_deploy),
    ]
def _get_rules(self):
    """Return a single rule that wraps self.run_rule."""
    return [PythonRule(self.run_rule, [], {})]
def _get_directory_creation_rules(self):
    """Creates directories for nfs.

    Bug fix: the nested helper ``_get_home_creation_rules`` declared a
    parameter named ``workers`` but its body read ``worker``, which only
    resolved through the enclosing loop's closure — the passed argument
    was silently ignored. The parameter is now named ``worker`` and used
    directly; behavior is otherwise unchanged.
    """
    rules = [
        LoggingRule('Creating home directory'),
        PythonRule(makedirs,
                   args=[self._mountpoints['home']['path']],
                   kwargs={'chmod': 0o700}),
        LoggingRule('Creating cache directory'),
        PythonRule(makedirs,
                   args=[self._mountpoints['cache']['path']],
                   kwargs={'chmod': 0o700}),
        LoggingRule('Creating db directory'),
        PythonRule(makedirs,
                   args=[self._mountpoints['db']['path']],
                   kwargs={'chmod': 0o700})
    ]
    # One private cache subdirectory per enabled builder.
    for builder_name in self._enabled_builders:
        rules.append(
            PythonRule(makedirs,
                       args=[
                           os.path.join(self._mountpoints['cache']['path'],
                                        builder_name)
                       ],
                       kwargs={'chmod': 0o700}))
    master = [{'name': 'master'}]
    workers = self._confreader['build_config']['target_workers']

    def _get_home_creation_rules(worker):
        # Rules that create one worker's home folder, its .ssh directory
        # and a templated .bashrc.
        worker_home_folder = os.path.join(
            self._mountpoints['home']['path'], worker['name'])
        worker_ssh_folder = os.path.join(worker_home_folder, '.ssh')
        home_creation_rules = [
            LoggingRule(('Creating nfs home directory '
                         'for worker %s') % worker['name']),
            PythonRule(makedirs,
                       args=[worker_ssh_folder],
                       kwargs={'chmod': 0o700}),
            LoggingRule('Creating .bashrc'),
            PythonRule(self._template_config,
                       args=[
                           os.path.join(worker_home_folder, '.bashrc'),
                           os.path.join(self._templates_folder,
                                        'bashrc.j2'),
                       ]),
        ]
        return home_creation_rules

    for worker in master + workers:
        rules.extend(_get_home_creation_rules(worker))
    # Build and software directories only exist for real workers.
    for worker in workers:
        worker_name = worker['name']
        rules.append(
            LoggingRule(('Creating build and software '
                         'directories for worker %s') % worker_name))
        for builder_name in self._enabled_builders:
            rules.extend([
                PythonRule(
                    makedirs,
                    args=[
                        os.path.join(self._mountpoints['builds']['path'],
                                     worker_name, builder_name),
                    ],
                ),
                PythonRule(
                    makedirs,
                    args=[
                        os.path.join(self._mountpoints['software']['path'],
                                     worker_name, builder_name),
                    ],
                ),
            ])
    return rules
def _copy_ssh(self):
    """Copies or creates ssh keys based on configuration"""
    rules = []
    # The master gets the same treatment as workers.
    workers = [{'name': 'master', 'builds': {}}]
    workers.extend(self._confreader['build_config']['target_workers'])
    auth_ssh_conf = self._confreader['build_config'].get('auths', {}).get(
        'ssh', {})
    ssh_config_src = auth_ssh_conf.get('config_file', None)
    known_hosts_src = auth_ssh_conf.get('known_hosts_file', None)
    private_keys = auth_ssh_conf.get('private_keys', [])
    public_keys = auth_ssh_conf.get('public_keys', [])
    for worker in workers:
        ssh_folder = os.path.join(self._mountpoints['home']['path'],
                                  worker['name'], '.ssh')
        rules.append(
            LoggingRule(('Copying ssh settings '
                         'to home folder of %s') % worker['name']))
        # Optional ssh config (world-readable, 0o644).
        if ssh_config_src:
            ssh_config_target = os.path.join(ssh_folder, 'config')
            rules.extend([
                PythonRule(copy_file,
                           args=[ssh_config_src, ssh_config_target],
                           kwargs={'chmod': 0o644})
            ])
        # Optional known_hosts (owner-only, 0o600).
        if known_hosts_src:
            known_hosts_target = os.path.join(ssh_folder, 'known_hosts')
            rules.extend([
                PythonRule(copy_file,
                           args=[known_hosts_src, known_hosts_target],
                           kwargs={'chmod': 0o600})
            ])
        if public_keys:
            for public_key_src in public_keys:
                public_key_target = os.path.join(
                    ssh_folder, os.path.basename(public_key_src))
                rules.extend([
                    PythonRule(copy_file,
                               args=[public_key_src, public_key_target],
                               kwargs={'chmod': 0o644})
                ])
        if private_keys:
            for private_key_src in private_keys:
                private_key_target = os.path.join(
                    ssh_folder, os.path.basename(private_key_src))
                rules.extend([
                    PythonRule(copy_file,
                               args=[private_key_src, private_key_target],
                               kwargs={'chmod': 0o600})
                ])
        else:
            # No keys configured: generate one for this worker (once).
            private_key_target = os.path.join(ssh_folder, 'id_rsa_autogen')
            if os.path.isfile(private_key_target):
                rules.append(
                    LoggingRule(('Autogenerated ssh key '
                                 '{0} exists. Skipping key '
                                 'generation.').format(private_key_target),
                                stdout_writer=self._logger.warning))
            else:
                rules.extend([
                    LoggingRule('No private keys given, generating them.',
                                stdout_writer=self._logger.warning),
                    SubprocessRule([
                        'ssh-keygen', '-t', 'rsa', '-b', '4096', '-N',
                        '""', '-q', '-f', private_key_target
                    ],
                                   shell=True)
                ])
                # Adding newly generated keys to key lists so that they
                # will be cloned to other workers
                private_keys.append(private_key_target)
                public_keys.append('%s.pub' % private_key_target)
    return rules
def _get_environment_install_rules(self):
    """
    This function returns build rules that install Anaconda environments.

    Returns:
        list: List of build rules that install Anaconda environments.
    """
    rules = []

    # Obtain already installed environments
    installed_environments = self._get_installed_environments(
    )['environments']

    # Only use system paths during installations
    env_path = list(
        filter(lambda x: re.search('^/(usr|bin|sbin)', x),
               os.getenv('PATH').split(':')))

    for environment in self._confreader['build_config']['environments']:
        environment_config = self._create_environment_config(environment)
        environment_name = environment_config['environment_name']
        pip_packages = environment_config.get('pip_packages', [])
        conda_packages = environment_config.get('conda_packages', [])
        condarc = environment_config.get('condarc', {})
        condarc_install = environment_config.get('condarc_install', {})
        condarc_postinstall = environment_config.get(
            'condarc_postinstall', {})
        extra_module_variables = environment_config.get(
            'extra_module_variables', {})
        # conda/mamba is chosen per-environment via conda_cmd.
        conda_install_cmd = [
            environment_config['conda_cmd'], 'install', '--yes', '-n',
            'base'
        ]
        pip_install_cmd = [
            'pip', 'install', '--cache-dir', self._pip_cache
        ]
        skip_install = False
        update_install = False
        freeze = environment_config.get('freeze', False)
        install_path = self._get_install_path(environment_config)
        module_path = self._get_module_path(environment_config)

        # Check if same kind of an environment is already installed
        installed_checksum = installed_environments.get(
            environment_name, {}).get('checksum', '')

        # Decide between fresh install / update / skip. A 'freeze'
        # environment is never updated even when its checksum changes.
        if not installed_checksum:
            install_msg = ("Environment {environment_name} "
                           "not installed. Starting installation.")
        elif installed_checksum != environment_config[
                'checksum'] and not freeze:
            previous_environment = installed_environments[
                environment_name]['environment_file']
            previous_install_path = installed_environments[
                environment_name]['install_path']
            install_msg = ("Environment {environment_name} installed "
                           "but marked for update.")
            update_install = True
        else:
            install_msg = (
                "Environment {environment_name} is already installed. "
                "Skipping installation.")
            # Reuse the recorded paths of the existing installation.
            install_path = installed_environments[environment_name][
                'install_path']
            module_path = installed_environments[environment_name][
                'module_path']
            skip_install = True

        installer = self._get_installer_path(
            environment_config, update_installer=update_install)

        # Add new installation path to PATH
        conda_env = {
            'PATH':
            ':'.join([os.path.join(install_path, 'bin')] + env_path),
            'PYTHONUNBUFFERED': '1',
        }

        environment_config['install_path'] = install_path
        environment_config['module_path'] = module_path
        environment_config[
            'environment_file'] = self._get_environment_file_path(
                install_path)

        rules.append(LoggingRule(install_msg.format(**environment_config)))

        if not skip_install:
            # Install base environment
            rules.extend([
                PythonRule(self._remove_environment, [install_path]),
                PythonRule(
                    self._download_installer,
                    [installer, environment_config['installer_version']]),
                PythonRule(
                    makedirs,
                    [install_path, 0o755],
                ),
                SubprocessRule(
                    ['bash', installer, '-f', '-b', '-p', install_path],
                    shell=True),
            ])
            rules.extend([
                # Verify no external condarc is used
                LoggingRule(
                    'Verifying that only the environment condarc is utilized.'
                ),
                PythonRule(self._verify_condarc, [install_path]),
                # Install mamba if needed
                LoggingRule('Installing mamba & conda-pack if needed.'),
                PythonRule(
                    self._install_package_tools,
                    [
                        install_path,
                        environment_config['mamba'],
                        environment_config.get('conda_pack', False),
                        conda_env,
                    ],
                ),
                # Create condarc for the installed environment
                LoggingRule('Creating condarc for environment.'),
                PythonRule(
                    self._update_condarc,
                    [
                        install_path, condarc, condarc_install,
                        condarc_postinstall
                    ],
                ),
            ])

            # During update, install old packages using environment.yml
            if update_install:
                rules.extend([
                    LoggingRule((
                        'Sanitizing environment file from previous installation '
                        '"{0}" to new installation "{1}"').format(
                            previous_environment,
                            environment_config['environment_file'])),
                    PythonRule(
                        self._sanitize_environment_file,
                        [
                            previous_environment,
                            environment_config['environment_file']
                        ],
                    ),
                    LoggingRule(('Installing conda packages from previous '
                                 'installation.')),
                    SubprocessRule([
                        environment_config['conda_cmd'], 'env', 'update',
                        '--file', environment_config['environment_file'],
                        '--prefix', install_path
                    ],
                                   env=conda_env,
                                   shell=True)
                ])
                # Keep already installed packages untouched on update.
                conda_install_cmd.append('--freeze-installed')
                pip_install_cmd.extend(
                    ['--upgrade', '--upgrade-strategy', 'only-if-needed'])

            # Install packages using conda
            if conda_packages:
                rules.extend([
                    LoggingRule('Installing conda packages.'),
                    SubprocessRule(conda_install_cmd + conda_packages,
                                   env=conda_env,
                                   shell=True),
                ])

            # Install packages using pip
            if pip_packages:
                rules.extend([
                    LoggingRule('Installing pip packages.'),
                    SubprocessRule(pip_install_cmd + pip_packages,
                                   env=conda_env,
                                   shell=True),
                ])

            # Create environment.yml
            rules.extend([
                LoggingRule(
                    'Creating environment.yml from newly built environment.'
                ),
                PythonRule(self._export_conda_environment, [install_path])
            ])

            # Add newly created environment to installed environments
            rules.extend([
                LoggingRule('Updating installed_environments.yml.'),
                PythonRule(self._update_installed_environments, [
                    environment_config['environment_name'],
                    environment_config
                ]),
            ])
            if update_install and self.remove_after_update:
                rules.extend([
                    LoggingRule(('Removing old environment from '
                                 '{0}').format(previous_install_path)),
                    PythonRule(self._remove_environment,
                               [previous_install_path])
                ])

        # Update .condarc (post-install variant, install_time=False)
        rules.extend([
            LoggingRule('Creating condarc for environment: %s' %
                        environment_name),
            PythonRule(self._update_condarc, [
                install_path, condarc, condarc_install, condarc_postinstall
            ], {'install_time': False})
        ])

        # Pack the environment
        if environment_config.get('conda_pack', False):
            rules.extend([
                LoggingRule('Creating conda-pack from the environment.'),
                PythonRule(self._conda_pack_environment, [
                    install_path,
                    self._conda_pack_path,
                    environment_config['name'],
                    environment_config['version'],
                    environment_config['checksum_small'],
                    conda_env,
                ])
            ])

        # Create modulefile for the environment
        rules.extend([
            LoggingRule('Creating modulefile for environment: %s' %
                        environment_name),
            PythonRule(self._write_modulefile, [
                environment_config['name'], environment_config['version'],
                install_path, module_path, extra_module_variables
            ])
        ])

    return rules
def test_python_rule(self, capture):
    """This function tests behaviour of the class
    buildrules.common.rule.PythonRule."""
    # Positional args, kwargs and mixed calls must all reach the wrapped
    # function; calling the rule returns that function's return value.
    self.assertEqual(
        PythonRule(
            example_function, [], {},
            stdout_writer=logging.info,
            stderr_writer=logging.warning)(), 3)
    self.assertEqual(
        PythonRule(
            example_function, [3, 4], {},
            stdout_writer=logging.info,
            stderr_writer=logging.warning)(), 7)
    self.assertEqual(
        PythonRule(
            example_function, [5], {'val2': 6},
            stdout_writer=logging.info,
            stderr_writer=logging.warning)(), 11)
    self.assertEqual(
        PythonRule(
            example_function, [], {'val1': 7, 'val2': 8},
            stdout_writer=logging.info,
            stderr_writer=logging.warning)(), 15)
    # NOTE(review): last call sets stdout_writer=logging.warning yet the
    # expected record below is still INFO — presumably the run banner is
    # always logged at INFO; confirm against PythonRule implementation.
    self.assertEqual(
        PythonRule(
            example_function, [], {},
            stdout_writer=logging.warning,
            stderr_writer=logging.warning)(), 3)
    # One "Running PythonRule" record per invocation, in call order.
    capture.check(
        (
            'PythonRule',
            'INFO',
            'Running PythonRule: { function: '
            'example_function, args: [], '
            'kwargs: {} }'
        ),
        (
            'PythonRule',
            'INFO',
            'Running PythonRule: { function: '
            'example_function, args: [3, '
            '4], kwargs: {} }'
        ),
        (
            'PythonRule',
            'INFO',
            'Running PythonRule: { function: '
            'example_function, args: [5], '
            "kwargs: {'val2': 6} }"
        ),
        (
            'PythonRule',
            'INFO',
            'Running PythonRule: { function: '
            'example_function, args: [], '
            "kwargs: {'val1': 7, 'val2': 8} }"
        ),
        (
            'PythonRule',
            'INFO',
            'Running PythonRule: { function: '
            'example_function, args: [], '
            'kwargs: {} }'
        )
    )
def get_compiler_flags_rule(spec_list, package_config):
    """Return a rule applying the configured flags to the first spec."""
    compiler_flags = package_config.get('flags', {})
    return PythonRule(self._set_compiler_flags,
                      [spec_list[0], compiler_flags])