def test_ordered_read_not_required_for_consistent_dag_hash(
        config, mock_packages):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
        round_trip_reversed_yaml_spec = Spec.from_yaml(reversed_yaml_string)
        # BUG FIX: this previously called Spec.from_yaml on the JSON
        # string, so the reversed-JSON round trip was never exercised.
        round_trip_reversed_json_spec = Spec.from_json(reversed_json_string)

        # TODO: remove this when build deps are in provenance.
        spec = spec.copy(deps=('link', 'run'))

        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec

        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()

        # full_hashes are equal
        spec.concretize()
        round_trip_yaml_spec.concretize()
        round_trip_json_spec.concretize()
        round_trip_reversed_yaml_spec.concretize()
        round_trip_reversed_json_spec.concretize()
        assert spec.full_hash() == round_trip_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_json_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_json_spec.full_hash()
def test_compiler_child(self):
    """A compiler constraint on a dependency is kept independent of
    the compiler chosen for the root."""
    spec = Spec('mpileaks%clang ^dyninst%gcc')
    spec.concretize()
    assert spec['mpileaks'].satisfies('%clang')
    assert spec['dyninst'].satisfies('%gcc')
def test_target_ranges_in_conflicts(self):
    """A spec whose constraints cannot be satisfied must raise."""
    impossible = Spec('impossible-concretization')
    with pytest.raises(spack.error.SpackError):
        impossible.concretized()
def test_no_matching_compiler_specs(self):
    """Requesting a compiler version that is not configured fails."""
    spec = Spec('a %[email protected]')
    with pytest.raises(spack.concretize.UnavailableCompilerVersionError):
        spec.concretize()
def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
    """A dependency may itself depend on the provider chosen for a
    virtual dependency of the root; normalize then concretize."""
    indirect = Spec('indirect-mpich')
    indirect.normalize()
    indirect.concretize()
def test_concretize_two_virtuals(self):
    """Test a package with multiple virtual dependencies."""
    spec = Spec('hypre')
    spec.concretize()
def test_concretize_two_virtuals_with_two_bound(self):
    """Test a package with multiple virtual deps and two of them preset."""
    spec = Spec('hypre ^openblas ^netlib-lapack')
    spec.concretize()
def release_jobs(parser, args):
    """Generate a GitLab-CI-style YAML pipeline that rebuilds a release
    spec-set and pushes the results to a binary mirror.

    Reads the OS -> container image mapping shipped in
    share/spack/docker, stages the specs of the release set into
    dependency-ordered build stages, and writes one job entry per
    (spec, OS) pair plus a final 'rebuild-index' job to
    ``args.output_file``.

    Args:
        parser: argparse parser (unused here; standard command signature).
        args: parsed command-line namespace; relevant attributes are
            specs, specs_deps_output, resolve_deps_locally, spec_set,
            mirror_url, cdash_url, print_summary, shared_runner_tag,
            signing_key and output_file.

    Raises:
        SpackError: if no spec-set path or no mirror url was provided.
    """
    # Locate and load the OS -> container image mapping bundled with Spack.
    share_path = os.path.join(spack_root, 'share', 'spack', 'docker')
    os_container_mapping_path = os.path.join(
        share_path, 'os-container-mapping.yaml')

    with open(os_container_mapping_path, 'r') as fin:
        os_container_mapping = syaml.load(fin)

    # Validate the mapping against its schema; on failure report and bail
    # out rather than generating a broken pipeline.
    try:
        validate(os_container_mapping, mapping_schema)
    except ValidationError as val_err:
        tty.error('Ill-formed os-container-mapping configuration object')
        tty.error(os_container_mapping)
        tty.debug(val_err)
        return

    containers = os_container_mapping['containers']

    if args.specs:
        # Just print out the spec labels and all dependency edges in
        # a json format.
        spec_list = [Spec(s) for s in args.specs]
        with open(args.specs_deps_output, 'w') as out:
            compute_spec_deps(spec_list, out)
        return

    # Only constrain staging to the local system type when asked to
    # resolve dependencies locally.
    current_system = sys_type() if args.resolve_deps_locally else None

    release_specs_path = args.spec_set
    if not release_specs_path:
        raise SpackError('Must provide path to release spec-set')

    release_spec_set = CombinatorialSpecSet.from_file(release_specs_path)

    mirror_url = args.mirror_url
    if not mirror_url:
        raise SpackError('Must provide url of target binary mirror')

    cdash_url = args.cdash_url

    # Partition the specs into stages such that each spec's dependencies
    # live in earlier stages.
    spec_labels, dependencies, stages = stage_spec_jobs(
        release_spec_set, containers, current_system)

    if not stages:
        tty.msg('No jobs staged, exiting.')
        return

    if args.print_summary:
        print_staging_summary(spec_labels, dependencies, stages)

    output_object = {}
    job_count = 0

    stage_names = ['stage-{0}'.format(i) for i in range(len(stages))]
    stage = 0

    for stage_jobs in stages:
        stage_name = stage_names[stage]

        for spec_label in stage_jobs:
            release_spec = spec_labels[spec_label]['spec']
            root_spec = spec_labels[spec_label]['rootSpec']

            pkg_compiler = release_spec.compiler
            pkg_hash = release_spec.dag_hash()

            # The container image is selected by the spec's architecture
            # string (os name key into the mapping).
            osname = str(release_spec.architecture)
            job_name = get_job_name(release_spec, osname)
            container_info = containers[osname]
            build_image = container_info['image']

            job_scripts = ['./bin/rebuild-package.sh']

            # An optional per-container setup script runs first; it is a
            # %-format template taking the package's compiler spec.
            if 'setup_script' in container_info:
                job_scripts.insert(
                    0, container_info['setup_script'] % pkg_compiler)

            # Translate staged dependency labels into the job names the
            # CI system uses for artifact passing.
            job_dependencies = []
            if spec_label in dependencies:
                job_dependencies = (
                    [get_job_name(spec_labels[dep_label]['spec'], osname)
                        for dep_label in dependencies[spec_label]])

            job_object = {
                'stage': stage_name,
                'variables': {
                    'MIRROR_URL': mirror_url,
                    'CDASH_BASE_URL': cdash_url,
                    'HASH': pkg_hash,
                    'DEPENDENCIES': ';'.join(job_dependencies),
                    'ROOT_SPEC': str(root_spec),
                },
                'script': job_scripts,
                'image': build_image,
                'artifacts': {
                    'paths': [
                        'local_mirror/build_cache',
                        'jobs_scratch_dir',
                        'cdash_report',
                    ],
                    'when': 'always',
                },
                'dependencies': job_dependencies,
            }

            # If we see 'compilers' in the container information, it's a
            # filter for the compilers this container can handle, else we
            # assume it can handle any compiler
            if 'compilers' in container_info:
                do_job = False
                for item in container_info['compilers']:
                    container_compiler_spec = CompilerSpec(item['name'])
                    if pkg_compiler == container_compiler_spec:
                        do_job = True
            else:
                do_job = True

            if args.shared_runner_tag:
                job_object['tags'] = [args.shared_runner_tag]

            if args.signing_key:
                job_object['variables']['SIGN_KEY_HASH'] = args.signing_key

            if do_job:
                output_object[job_name] = job_object
                job_count += 1

        stage += 1

    tty.msg('{0} build jobs generated in {1} stages'.format(
        job_count, len(stages)))

    # Append a final stage that rebuilds the mirror's package index after
    # all package jobs have finished.
    final_stage = 'stage-rebuild-index'
    final_job = {
        'stage': final_stage,
        'variables': {
            'MIRROR_URL': mirror_url,
        },
        # NOTE(review): 'build_image' here is whatever image the *last*
        # iterated job used -- confirm the rebuild-index job is meant to
        # reuse an arbitrary container image.
        'image': build_image,
        'script': './bin/rebuild-index.sh',
    }
    if args.shared_runner_tag:
        final_job['tags'] = [args.shared_runner_tag]
    output_object['rebuild-index'] = final_job
    stage_names.append(final_stage)
    output_object['stages'] = stage_names

    with open(args.output_file, 'w') as outf:
        outf.write(syaml.dump(output_object))
def test_normalize_a_lot(self):
    """Normalization should be idempotent: repeated calls are safe."""
    spec = Spec('mpileaks')
    for _ in range(4):
        spec.normalize()
def test_concrete_spec(config, mock_packages):
    """A concretized spec survives a YAML round trip unchanged."""
    concrete = Spec('mpileaks+debug~opt')
    concrete.concretize()
    check_yaml_round_trip(concrete)
def test_yaml_multivalue():
    """Multi-valued variants survive a YAML round trip unchanged."""
    multi = Spec('multivalue_variant foo="bar,baz"')
    multi.concretize()
    check_yaml_round_trip(multi)
def test_ambiguous_version_spec(mock_packages):
    """A normalized (not concrete) spec with version ranges and lists
    survives a YAML round trip unchanged."""
    ambiguous = Spec('[email protected]:5.0,6.1,7.3+debug~opt')
    ambiguous.normalize()
    check_yaml_round_trip(ambiguous)
def test_normal_spec(mock_packages):
    """A normalized spec survives a YAML round trip unchanged."""
    normalized = Spec('mpileaks+debug~opt')
    normalized.normalize()
    check_yaml_round_trip(normalized)
def test_simple_spec():
    """An abstract, unprocessed spec survives a YAML round trip."""
    check_yaml_round_trip(Spec('mpileaks'))
def test_compiler_flags_from_user_are_grouped(self):
    """A quoted group of flags given on the command line stays together
    as a single cflags entry after concretization."""
    spec = Spec('a%gcc cflags="-O -foo-flag foo-val" platform=test')
    spec.concretize()
    assert '-foo-flag foo-val' in spec.compiler_flags['cflags']
def test_unsatisfiable_version(self, set_dependency):
    """A version constraint conflicting with the declared dependency
    must raise during normalization."""
    set_dependency('mpileaks', '[email protected]')
    unsat = Spec('mpileaks ^[email protected] ^callpath ^dyninst ^libelf ^libdwarf')
    with pytest.raises(spack.spec.UnsatisfiableVersionSpecError):
        unsat.normalize()
def test_concretize_multi_provider(self):
    """Concretizing with a versioned virtual picks a provider at that
    version.

    BUG FIX: the function was named ``concretize_multi_provider``
    (missing the ``test_`` prefix), so pytest never collected or ran
    it.
    """
    s = Spec('mpileaks ^[email protected]')
    s.concretize()
    assert s['mpi'].version == ver('1.10.3')
def test_unsatisfiable_compiler_version(self, set_dependency):
    """A compiler-version constraint conflicting with the declared
    dependency must raise during normalization."""
    set_dependency('mpileaks', 'mpich%[email protected]')
    unsat = Spec('mpileaks ^mpich%[email protected] ^callpath ^dyninst ^libelf'
                 ' ^libdwarf')
    with pytest.raises(spack.spec.UnsatisfiableCompilerSpecError):
        unsat.normalize()
def test_concretize_two_virtuals_with_one_bound(self, refresh_builtin_mock):
    """Test a package with multiple virtual dependencies and one preset."""
    spec = Spec('hypre ^openblas')
    spec.concretize()
def test_unsatisfiable_architecture(self, set_dependency):
    """A target constraint conflicting with the declared dependency
    must raise during normalization."""
    set_dependency('mpileaks', 'mpich platform=test target=be')
    unsat = Spec('mpileaks ^mpich platform=test target=fe ^callpath'
                 ' ^dyninst ^libelf ^libdwarf')
    with pytest.raises(spack.spec.UnsatisfiableArchitectureSpecError):
        unsat.normalize()
def test_concretize_two_virtuals_with_dual_provider(self):
    """Test a package with multiple virtual dependencies and force a
    provider that provides both.
    """
    spec = Spec('hypre ^openblas-with-lapack')
    spec.concretize()
def test_invalid_dep(self, spec_str):
    """Concretizing a spec with an invalid dependency raises SpecError."""
    with pytest.raises(spack.error.SpecError):
        Spec(spec_str).concretize()
def test_no_compilers_for_arch(self):
    """An architecture with no configured compilers fails to concretize."""
    spec = Spec('a arch=linux-rhel0-x86_64')
    with pytest.raises(spack.concretize.NoCompilersForArchError):
        spec.concretize()
def test_normalize_mpileaks(self):
    """Check parsing, equality and DAG structure around normalization.

    Builds the same mpileaks spec as a flat parse, as its expected
    normalized DAG, and as a structurally wrong DAG with duplicated
    libelf nodes, then verifies which comparisons hold before and
    after ``normalize()``.
    """
    # Spec parsed in from a string
    spec = Spec.from_literal({
        'mpileaks ^mpich ^callpath ^dyninst ^[email protected] ^libdwarf': None
    })

    # What that spec should look like after parsing
    expected_flat = Spec.from_literal({
        'mpileaks': {
            'mpich': None,
            'callpath': None,
            'dyninst': None,
            '[email protected]': None,
            'libdwarf': None
        }
    })

    # What it should look like after normalization
    mpich = Spec('mpich')
    libelf = Spec('[email protected]')
    expected_normalized = Spec.from_literal({
        'mpileaks': {
            'callpath': {
                'dyninst': {
                    'libdwarf': {libelf: None},
                    libelf: None
                },
                mpich: None
            },
            mpich: None
        },
    })

    # Similar to normalized spec, but now with copies of the same
    # libelf node.  Normalization should result in a single unique
    # node for each package, so this is the wrong DAG.
    non_unique_nodes = Spec.from_literal({
        'mpileaks': {
            'callpath': {
                'dyninst': {
                    'libdwarf': {'[email protected]': None},
                    '[email protected]': None
                },
                mpich: None
            },
            mpich: None
        }
    }, normal=False)

    # All specs here should be equal under regular equality
    specs = (spec, expected_flat, expected_normalized, non_unique_nodes)
    # BUG FIX: this used to be ``zip(specs, specs)``, which paired each
    # spec with itself and made the assertions vacuous.  Comparing
    # consecutive pairs checks all four are equal (by transitivity).
    for lhs, rhs in zip(specs, specs[1:]):
        assert lhs == rhs
        assert str(lhs) == str(rhs)

    # Test that equal and equal_dag are doing the right thing
    assert spec == expected_flat
    assert spec.eq_dag(expected_flat)

    # Normalized has different DAG structure, so NOT equal.
    assert spec != expected_normalized
    assert not spec.eq_dag(expected_normalized)

    # Again, different DAG structure so not equal.
    assert spec != non_unique_nodes
    assert not spec.eq_dag(non_unique_nodes)

    spec.normalize()

    # After normalizing, spec_dag_equal should match the normalized spec.
    assert spec != expected_flat
    assert not spec.eq_dag(expected_flat)

    # verify DAG structure without deptypes.
    assert spec.eq_dag(expected_normalized, deptypes=False)
    assert not spec.eq_dag(non_unique_nodes, deptypes=False)

    assert not spec.eq_dag(expected_normalized, deptypes=True)
    assert not spec.eq_dag(non_unique_nodes, deptypes=True)
def test_external_package(self):
    """An external package keeps its configured path, pulls no
    dependencies in, and honors its compiler constraint."""
    spec = Spec('externaltool%gcc')
    spec.concretize()
    tool = spec['externaltool']
    assert tool.external_path == '/path/to/external_tool'
    assert 'externalprereq' not in spec
    assert tool.compiler.satisfies('gcc')
def test_copy_dependencies(self):
    """Copying a spec preserves its dependency nodes."""
    original = Spec('mpileaks ^[email protected]')
    duplicate = original.copy()
    assert '^[email protected]' in duplicate
    assert '^mpich2' in duplicate
def test_normalize(spec_and_expected, config, mock_packages):
    """Normalizing the input spec yields the expected DAG (ignoring
    dependency types)."""
    spec_str, expected = spec_and_expected
    actual = Spec(spec_str)
    actual.normalize()
    assert actual.eq_dag(expected, deptypes=False)
def test_concretize_deptypes(self):
    """Ensure that dependency types are preserved after concretization."""
    diamond = Spec('dt-diamond')
    diamond.concretize()
    self.check_diamond_deptypes(diamond)
def test_custom_compiler_version(self):
    """A non-numeric custom compiler version is preserved through
    concretization (new concretizer only)."""
    if spack.config.get('config:concretizer') == 'original':
        pytest.xfail('Known failure of the original concretizer')
    concretized = Spec('a %gcc@foo os=redhat6').concretized()
    assert '%gcc@foo' in concretized
def test_concrete_spec(config, builtin_mock):
    """A concretized spec survives a YAML round trip unchanged."""
    concrete = Spec('mpileaks+debug~opt')
    concrete.concretize()
    check_yaml_round_trip(concrete)