def test_dup_product_names(self, pool_path):
    """Verify no product name is generated more than once for a pool.

    Pools listed in SPECIAL_POOLS with an ``xfail`` reason are converted
    to expected failures instead of hard errors.
    """
    pool = Path(pool_path).stem
    special = SPECIAL_POOLS.get(pool, SPECIAL_DEFAULT)

    # Generate the associations without writing any files.
    results = asn_generate(['--dry-run', self.get_data(pool_path)])

    # Tally every product name across all generated associations.
    name_counts = Counter(
        product['name']
        for asn in results.associations
        for product in asn['products']
    )
    duplicates = [name for name, n in name_counts.items() if n > 1]

    try:
        assert not duplicates, 'Multiple product names: {}'.format(duplicates)
    except AssertionError:
        # Known-bad pools become expected failures.
        if special['xfail']:
            pytest.xfail(special['xfail'])
        else:
            raise
def test_against_standard(self, pool_path, slow):
    """Compare a generated association against a standard

    Success is when no other AssertionError occurs.

    Pools flagged ``slow`` in SPECIAL_POOLS are skipped unless the
    ``--slow`` option was given; pools with an ``xfail`` reason turn
    comparison failures into expected failures.
    """
    # Parse pool name
    pool = Path(pool_path).stem
    proposal, version_id = pool_regex.match(pool).group('proposal', 'versionid')
    special = SPECIAL_POOLS.get(pool, SPECIAL_DEFAULT)
    if special['slow'] and not slow:
        # BUG FIX: message was a plain string, so "{pool}" was never
        # interpolated into the skip reason. Use an f-string.
        pytest.skip(f'Pool {pool} requires "--slow" option')

    # Create the generator running arguments
    generated_path = Path('generate')
    generated_path.mkdir()
    args = special['args'] + [
        '-p', str(generated_path),
        '--version-id', version_id,
        self.get_data(pool_path)
    ]

    # Create the associations
    asn_generate(args)

    # Retrieve the truth files matching this proposal/version id.
    asn_regex = re.compile(
        r'.+{proposal}.+{version_id}(_[^_]+?_[^_]+?_asn\.json)$'.format(
            proposal=proposal, version_id=version_id
        ),
        flags=re.IGNORECASE
    )
    truth_paths = [
        self.get_data(truth_path)
        for truth_path in self.truth_paths
        if asn_regex.match(truth_path)
    ]

    # Compare the association sets.
    try:
        compare_asn_files(generated_path.glob('*.json'), truth_paths)
    except AssertionError:
        if special['xfail']:
            pytest.xfail(special['xfail'])
        else:
            raise
def test_against_standard(self, pool_path):
    """Compare a generated association against a standard

    Success is when no other AssertionError occurs.
    """
    # Extract the proposal and version id from the pool file name.
    pool = Path(pool_path).stem
    proposal, version_id = pool_regex.match(pool).group('proposal', 'versionid')

    # Generate the associations into a fresh working directory.
    out_dir = Path('generate')
    out_dir.mkdir()
    generate_args = [
        '--no-merge',
        '-p', str(out_dir),
        '--version-id', version_id,
        self.get_data(pool_path)
    ]
    asn_generate(generate_args)

    # Select the truth files that belong to this proposal/version id.
    truth_pattern = re.compile(
        r'.+{proposal}.+{version_id}(_[^_]+?_[^_]+?_asn\.json)$'.format(
            proposal=proposal, version_id=version_id
        ),
        flags=re.IGNORECASE
    )
    truth_paths = [
        self.get_data(candidate)
        for candidate in asn_base.truth_paths
        if truth_pattern.match(candidate)
    ]

    # Compare generated vs. truth; a known product-set mismatch is an
    # expected failure (Issue #3039), anything else propagates.
    try:
        compare_asn_files(out_dir.glob('*.json'), truth_paths)
    except AssertionError as error:
        if 'Associations do not share a common set of products' in str(error):
            pytest.xfail('Issue #3039')
        else:
            raise
def test_against_standard(sdpdata_module, pool_path, slow):
    """Compare a generated association against a standard

    Success is when no other AssertionError occurs.
    """
    # Parse pool name
    pool = Path(pool_path).stem
    proposal, version_id = pool_regex.match(pool).group(
        'proposal', 'versionid')
    special = SPECIAL_POOLS.get(pool, SPECIAL_DEFAULT)
    if special['slow'] and not slow:
        pytest.skip(f'Pool {pool} requires "--slow" option')

    # Work inside a per-pool directory.
    work_dir = Path(pool)
    work_dir.mkdir()
    with pushdir(work_dir):
        # NOTE(review): this creates a second, nested <pool>/<pool>
        # directory for the output — confirm that is intentional.
        out_dir = Path(pool)
        out_dir.mkdir()
        sdpdata_module.output = str(out_dir)

        # Assemble the generator arguments and create the associations.
        generate_args = special['args'] + [
            '-p', sdpdata_module.output,
            '--version-id', version_id,
            sdpdata_module.get_data(pool_path)
        ]
        asn_generate(generate_args)

        # Compare to the truth associations; known-bad pools xfail.
        truth_paths = sdpdata_module.truth_paths(pool)
        try:
            compare_asn_files(out_dir.glob('*.json'), truth_paths)
        except AssertionError:
            if special['xfail']:
                pytest.xfail(special['xfail'])
            else:
                raise
def test_against_standard(self, pool_path):
    """Compare a generated association against a standard

    Success is when no other AssertionError occurs.
    """
    # Derive proposal/version id from the pool file name.
    pool = Path(pool_path).stem
    proposal, version_id = pool_regex.match(pool).group(
        'proposal', 'versionid')

    # Generate associations into a dedicated directory.
    out_dir = Path('generate')
    out_dir.mkdir()
    asn_generate([
        '--no-merge',
        '-p', str(out_dir),
        '--version-id', version_id,
        self.get_data(pool_path)
    ])

    # Collect the truth files for this proposal/version id.
    truth_pattern = re.compile(
        r'.+{proposal}.+{version_id}(_[^_]+?_[^_]+?_asn\.json)$'.format(
            proposal=proposal, version_id=version_id),
        flags=re.IGNORECASE)
    truth_paths = [
        self.get_data(candidate)
        for candidate in ASN_BASE.truth_paths
        if truth_pattern.match(candidate)
    ]

    # Compare the association sets; the known product-set mismatch is
    # an expected failure (Issue #3039), anything else propagates.
    try:
        compare_asn_files(out_dir.glob('*.json'), truth_paths)
    except AssertionError as error:
        if 'Associations do not share a common set of products' in str(
                error):
            pytest.xfail('Issue #3039')
        else:
            raise
def test_dup_product_names(self, pool_path):
    """Verify no product name is generated more than once for a pool."""
    # Dry-run generation: build associations without writing files.
    results = asn_generate(
        ['--dry-run', '--no-merge', self.get_data(pool_path)])

    # Count each product name across every generated association.
    name_counts = Counter(
        product['name']
        for asn in results.associations
        for product in asn['products']
    )
    duplicates = [name for name, n in name_counts.items() if n > 1]

    assert not duplicates, 'Multiple product names: {}'.format(duplicates)
def test_dup_product_names(self, pool_path):
    """Check for duplicate product names for a pool

    Generates the associations in dry-run mode, tallies every product
    name across all associations, and asserts none appears twice.
    """
    results = asn_generate([
        '--dry-run',
        '--no-merge',
        self.get_data(pool_path)
    ])
    asns = results.associations
    product_names = Counter(
        product['name']
        for asn in asns
        for product in asn['products']
    )
    multiples = [
        product_name
        for product_name, count in product_names.items()
        if count > 1
    ]

    # IDIOM FIX: test the list's truthiness directly rather than
    # `not len(...)` (PEP 8), matching the sibling test variant.
    assert not multiples, 'Multiple product names: {}'.format(multiples)