def test_spec_set_include_exclude_conflict():
    """A package listed in both 'include' and 'exclude' is excluded:
    exclude wins over include."""
    yaml_file = {
        'spec-set': {
            'include': ['gmake'],
            'exclude': ['gmake'],
            'matrix': [
                {'packages': {
                    'gmake': {'versions': ['4.0']}
                }},
                {'compilers': {
                    'gcc': {'versions': ['4.2.1', '6.3.0']},
                    'clang': {'versions': ['8.0', '3.8']}
                }},
            ]
        }
    }
    spec_set = CombinatorialSpecSet(yaml_file, False)
    # Every candidate spec is filtered out, so the set is empty.
    assert len(list(spec_set)) == 0
def test_compiler_specs():
    """_compiler_specs expands each compiler name/version pair into a
    '%name@version' compiler spec."""
    spec_set = CombinatorialSpecSet(basic_yaml_file, False)
    # Materialize up front: if _compiler_specs returns a generator, the
    # original len(list(compilers)) call would exhaust it and every
    # subsequent 'in' membership check would spuriously fail.
    compilers = list(spec_set._compiler_specs({
        'gcc': {'versions': ['4.2.1', '6.3.0']},
        'clang': {'versions': ['8.0', '3.8']}
    }))
    assert len(compilers) == 4
    assert Spec('%[email protected]') in compilers
    assert Spec('%[email protected]') in compilers
    assert Spec('%[email protected]') in compilers
    assert Spec('%[email protected]') in compilers
def test_spec_set_no_include():
    """Make sure that without any exclude or include, we get the full cross-
    product of specs/versions."""
    yaml_file = {
        'spec-set': {
            'matrix': [
                {'packages': {
                    'gmake': {'versions': ['4.0']}
                }},
                {'compilers': {
                    'gcc': {'versions': ['4.2.1', '6.3.0']},
                    'clang': {'versions': ['8.0', '3.8']}
                }},
            ]
        }
    }
    spec_set = CombinatorialSpecSet(yaml_file, False)
    # 1 gmake version x 4 compiler versions
    assert len(list(spec_set)) == 4
def test_spec_set_with_specs():
    """Only specs whose package name appears in 'include' survive
    filtering."""
    yaml_file = {
        'spec-set': {
            'include': ['gmake', 'appres'],
            'matrix': [
                {'specs': ['[email protected]', '[email protected]', '[email protected]']},
                {'compilers': {
                    'gcc': {'versions': ['4.2.1', '6.3.0']},
                    'clang': {'versions': ['8.0', '3.8']}
                }},
            ]
        }
    }
    spec_set = CombinatorialSpecSet(yaml_file, False)
    # 2 included specs x 4 compiler versions
    assert len(list(spec_set)) == 8
def test_spec_set_get_cdash_array():
    """The 'cdash' property may name several sites in a list; the list is
    stored on the spec set unchanged."""
    yaml_file = {
        'spec-set': {
            'cdash': ['http://example.com/cdash', 'http://example.com/cdash2'],
            'project': 'testproj',
            'matrix': [
                {'packages': {
                    'gmake': {'versions': ['4.0']},
                }},
                {'compilers': {
                    'gcc': {'versions': ['4.2.1', '6.3.0']},
                    'clang': {'versions': ['8.0', '3.8']},
                }},
            ]
        }
    }
    spec_set = CombinatorialSpecSet(yaml_file)
    expected_sites = [
        'http://example.com/cdash',
        'http://example.com/cdash2',
    ]
    assert spec_set.cdash == expected_sites
    assert spec_set.project == 'testproj'
def test_spec_set_basic():
    """The "include" isn't required, but if it is present, we should only
    see specs mentioned there.  Also, 'cdash' and 'project' properties are
    captured and stored on the resulting CombinatorialSpecSet as
    attributes."""
    spec_set = CombinatorialSpecSet(basic_yaml_file, False)
    assert len([s for s in spec_set]) == 4
    assert spec_set.cdash == ['http://example.com/cdash']
    assert spec_set.project == 'testproj'
def test_package_specs():
    """_package_specs expands each package name/version pair into a
    'name@version' spec."""
    spec_set = CombinatorialSpecSet(basic_yaml_file, False)
    # Materialize once: if _package_specs returns a generator, the first
    # 'in' membership test would start consuming it and later checks
    # would spuriously fail.
    packages = list(spec_set._package_specs({
        'gmake': {'versions': ['4.0', '5.0']},
        'appres': {'versions': ['1.0.4']},
        'allinea-reports': {'versions': ['6.0.1', '6.0.3', '6.0.4']}
    }))
    # 2 + 1 + 3 versions across the three packages
    assert len(packages) == 6
    assert Spec('[email protected]') in packages
    assert Spec('[email protected]') in packages
    assert Spec('[email protected]') in packages
    assert Spec('[email protected]') in packages
    assert Spec('[email protected]') in packages
    assert Spec('[email protected]') in packages
def test_spec_set_simple_spec_list():
    """Make sure we can handle the slightly more concise syntax where we
    include the package name/version together and skip the extra keys in
    the dictionary."""
    yaml_file = {
        'spec-set': {
            'matrix': [
                {'specs': ['[email protected]', '[email protected]', '[email protected]']},
            ]
        }
    }
    spec_set = CombinatorialSpecSet(yaml_file, False)
    assert len(list(spec_set)) == 3
def test_spec_set_packages_no_matrix():
    """The 'matrix' property is required; constructing a spec set without
    it must raise a schema validation error."""
    yaml_file = {
        'spec-set': {
            'include': ['gmake'],
            'packages': {
                'gmake': {'versions': ['4.0']},
                'appres': {'versions': ['1.0.4']},
                'allinea-reports': {'versions': ['6.0.4']}
            },
        }
    }
    with pytest.raises(ValidationError):
        CombinatorialSpecSet(yaml_file)
def check_binaries(args):
    """Check specs (either a single spec from --spec, or else the full set
    of release specs) against remote binary mirror(s) to see if any need
    to be rebuilt.

    This command communicates its result via the process exit code: a
    non-zero code means at least one of the indicated specs needs to be
    rebuilt.
    """
    if args.spec or args.spec_yaml:
        specs = [get_concrete_spec(args)]
    else:
        # No spec given on the command line: fall back to the default
        # release spec-set shipped with spack.
        defaults_path = os.path.join(
            etc_path, 'spack', 'defaults', 'release.yaml')
        specs = list(CombinatorialSpecSet.from_file(defaults_path))

    if not specs:
        tty.msg('No specs provided, exiting.')
        sys.exit(0)

    for spec in specs:
        spec.concretize()

    # Next see if there are any configured binary mirrors; an explicit
    # --mirror-url overrides the configured ones.
    configured_mirrors = spack.config.get('mirrors', scope=args.scope)
    if args.mirror_url:
        configured_mirrors = {'additionalMirrorUrl': args.mirror_url}

    if not configured_mirrors:
        tty.msg('No mirrors provided, exiting.')
        sys.exit(0)

    sys.exit(bindist.check_specs_against_mirrors(
        configured_mirrors, specs, args.output_file, args.rebuild_on_error))
def test_spec_set_include_limited_packages():
    """When the 'include' key is present it acts as a filter: only the
    packages named there make it into the output."""
    yaml_file = {
        'spec-set': {
            'include': ['gmake'],
            'matrix': [
                {'packages': {
                    'gmake': {'versions': ['4.0']},
                    'appres': {'versions': ['1.0.4']},
                    'allinea-reports': {'versions': ['6.0.4']}
                }},
                {'compilers': {
                    'gcc': {'versions': ['4.2.1', '6.3.0']},
                    'clang': {'versions': ['8.0', '3.8']}
                }},
            ]
        }
    }
    spec_set = CombinatorialSpecSet(yaml_file, False)
    # 1 included gmake version x 4 compiler versions
    assert len(list(spec_set)) == 4
def test_spec_set_exclude():
    """The 'exclude' property isn't required, but when it appears, specs
    for the packages named there must not show up in the output."""
    yaml_file = {
        'spec-set': {
            'exclude': ['gmake'],
            'matrix': [
                {'packages': {
                    'gmake': {'versions': ['4.0']},
                    'appres': {'versions': ['1.0.4']},
                    'allinea-reports': {'versions': ['6.0.4']}
                }},
                {'compilers': {
                    'gcc': {'versions': ['4.2.1', '6.3.0']},
                    'clang': {'versions': ['8.0', '3.8']}
                }},
            ]
        }
    }
    spec_set = CombinatorialSpecSet(yaml_file, False)
    # (appres + allinea-reports) x 4 compiler versions; gmake excluded
    assert len(list(spec_set)) == 8
def release_jobs(parser, args):
    """Generate a GitLab-CI-style job description (yaml) for rebuilding a
    release spec-set and write it to ``args.output_file``.

    One build job is emitted per staged spec, grouped into 'stage-N'
    stages produced by ``stage_spec_jobs``, plus a final
    'rebuild-index' job.  With ``args.specs`` given, the function instead
    only dumps spec dependency information and returns.

    Raises:
        SpackError: if ``args.spec_set`` or ``args.mirror_url`` is missing.
    """
    # Load and validate the OS -> container image mapping shipped with spack.
    share_path = os.path.join(spack_root, 'share', 'spack', 'docker')
    os_container_mapping_path = os.path.join(
        share_path, 'os-container-mapping.yaml')

    with open(os_container_mapping_path, 'r') as fin:
        os_container_mapping = syaml.load(fin)

    try:
        validate(os_container_mapping, mapping_schema)
    except ValidationError as val_err:
        tty.error('Ill-formed os-container-mapping configuration object')
        tty.error(os_container_mapping)
        tty.debug(val_err)
        return

    containers = os_container_mapping['containers']

    if args.specs:
        # Just print out the spec labels and all dependency edges in
        # a json format.
        spec_list = [Spec(s) for s in args.specs]
        with open(args.specs_deps_output, 'w') as out:
            compute_spec_deps(spec_list, out)
        return

    # Only pin jobs to the current system architecture when the user asked
    # to resolve dependencies locally.
    current_system = sys_type() if args.resolve_deps_locally else None

    release_specs_path = args.spec_set
    if not release_specs_path:
        raise SpackError('Must provide path to release spec-set')

    release_spec_set = CombinatorialSpecSet.from_file(release_specs_path)

    mirror_url = args.mirror_url
    if not mirror_url:
        raise SpackError('Must provide url of target binary mirror')

    cdash_url = args.cdash_url

    # Partition the specs into dependency-ordered stages.
    spec_labels, dependencies, stages = stage_spec_jobs(
        release_spec_set, containers, current_system)

    if not stages:
        tty.msg('No jobs staged, exiting.')
        return

    if args.print_summary:
        print_staging_summary(spec_labels, dependencies, stages)

    output_object = {}
    job_count = 0

    stage_names = ['stage-{0}'.format(i) for i in range(len(stages))]
    stage = 0

    for stage_jobs in stages:
        stage_name = stage_names[stage]

        for spec_label in stage_jobs:
            release_spec = spec_labels[spec_label]['spec']
            root_spec = spec_labels[spec_label]['rootSpec']

            pkg_compiler = release_spec.compiler
            pkg_hash = release_spec.dag_hash()

            # Pick the container image for this spec's target OS.
            osname = str(release_spec.architecture)
            job_name = get_job_name(release_spec, osname)
            container_info = containers[osname]
            build_image = container_info['image']

            job_scripts = ['./bin/rebuild-package.sh']

            # An optional per-container setup script (a %-format template
            # taking the compiler spec) runs before the rebuild script.
            if 'setup_script' in container_info:
                job_scripts.insert(
                    0, container_info['setup_script'] % pkg_compiler)

            job_dependencies = []
            if spec_label in dependencies:
                job_dependencies = (
                    [get_job_name(spec_labels[dep_label]['spec'], osname)
                        for dep_label in dependencies[spec_label]])

            job_object = {
                'stage': stage_name,
                'variables': {
                    'MIRROR_URL': mirror_url,
                    'CDASH_BASE_URL': cdash_url,
                    'HASH': pkg_hash,
                    'DEPENDENCIES': ';'.join(job_dependencies),
                    'ROOT_SPEC': str(root_spec),
                },
                'script': job_scripts,
                'image': build_image,
                'artifacts': {
                    'paths': [
                        'local_mirror/build_cache',
                        'jobs_scratch_dir',
                        'cdash_report',
                    ],
                    'when': 'always',
                },
                'dependencies': job_dependencies,
            }

            # If we see 'compilers' in the container information, it's a
            # filter for the compilers this container can handle, else we
            # assume it can handle any compiler
            if 'compilers' in container_info:
                do_job = False
                for item in container_info['compilers']:
                    container_compiler_spec = CompilerSpec(item['name'])
                    if pkg_compiler == container_compiler_spec:
                        do_job = True
            else:
                do_job = True

            if args.shared_runner_tag:
                job_object['tags'] = [args.shared_runner_tag]

            if args.signing_key:
                job_object['variables']['SIGN_KEY_HASH'] = args.signing_key

            if do_job:
                output_object[job_name] = job_object
                job_count += 1

        stage += 1

    tty.msg('{0} build jobs generated in {1} stages'.format(
        job_count, len(stages)))

    # Append a final stage that rebuilds the mirror index after all
    # package jobs finish.
    final_stage = 'stage-rebuild-index'
    final_job = {
        'stage': final_stage,
        'variables': {
            'MIRROR_URL': mirror_url,
        },
        # NOTE(review): 'build_image' here is whatever value the last
        # iteration of the loop above left behind (and is unbound if the
        # loop body never ran a spec) -- confirm this reuse is intended.
        'image': build_image,
        'script': './bin/rebuild-index.sh',
    }

    if args.shared_runner_tag:
        final_job['tags'] = [args.shared_runner_tag]

    output_object['rebuild-index'] = final_job
    stage_names.append(final_stage)
    output_object['stages'] = stage_names

    with open(args.output_file, 'w') as outf:
        outf.write(syaml.dump(output_object))