def test_against_standard(self, standard_pars):
    """Compare a generated association against a standard

    Success is when no other AssertionError occurs.
    """
    if standard_pars.xfail is not None:
        pytest.xfail(reason=standard_pars.xfail)

    # Generate the associations into a scratch directory.
    out_dir = Path('generate')
    out_dir.mkdir()
    version_id = standard_pars.pool_root.replace('_', '-')
    cmd_args = standard_pars.main_args + [
        '-p', str(out_dir),
        '--version-id', version_id,
    ]
    pool = combine_pools(
        [t_path(Path('data') / (standard_pars.pool_root + '.csv'))]
    )
    Main(cmd_args, pool=pool)

    # Fetch the truth files matching this pool's version id.
    truth_paths = [
        self.get_data(truth_path)
        for truth_path in self.data_glob(
            *self.ref_loc, glob='*_' + version_id + '_*.json')
    ]

    # Compare generated vs. truth; a declared xfail converts the
    # AssertionError into an expected failure.
    try:
        compare_asn_files(out_dir.glob('*.json'), truth_paths)
    except AssertionError:
        if standard_pars.xfail:
            pytest.xfail(standard_pars.xfail)
        else:
            raise
def test_nrs_msa_nod():
    """NIRSpec MSA 3-shutter nod: expect 12 associations of 3 members each."""
    pool = combine_pools(t_path('data/pool_023_nirspec_msa_3nod.csv'))
    all_candidates = constrain_on_candidates(None)
    rules = registry_level2_only(global_constraints=all_candidates)
    asns = generate(pool, rules)

    assert len(asns) == 12
    for asn in asns:
        members = asn['products'][0]['members']
        assert len(members) == 3
def miri_params(request):
    """Parametrized fixture: Level3 MIRI associations for one candidate.

    Each parameter tuple carries the candidate id plus the expected
    association type, name, and product name for downstream assertions.
    """
    cid, asn_type, asn_name, product_name = request.param
    pool = combine_pools(t_path('data/pool_007_spec_miri.csv'))
    candidate_constraint = constrain_on_candidates((cid, ))
    registry = registry_level3_only(global_constraints=candidate_constraint)
    asns = generate(pool, registry)
    return asns, asn_type, asn_name, product_name
def test_item():
    """ProcessItem equality/hashing: distinct pool rows give distinct items."""
    pool = combine_pools(t_path('data/pool_013_coron_nircam.csv'))
    first = ProcessItem(pool[0])
    second = ProcessItem(pool[1])

    assert first == first
    assert first != second
    # Both items must be hashable and hash distinctly enough to coexist.
    assert len({first, second}) == 2
def test_level2_asn_names_with_version(pool_params):
    """Association names carry a version id when generated with version_id=True."""
    pool = helpers.combine_pools(helpers.t_path(pool_params))
    rules = helpers.registry_level2_only(global_constraints=all_candidates)
    asns = generate(pool, rules, version_id=True)

    assert len(asns) > 0
    for asn in asns:
        # NOTE(review): the pattern is named LEVEL3 but this is a Level2
        # test — presumably both levels share the naming scheme; confirm.
        assert re.match(LEVEL3_ASN_WITH_VERSION, asn.asn_name) is not None
def test_global_constraints(constraints, pool, n_asns):
    """Test that global constraints get applied to all rules"""
    rules = AssociationRegistry(global_constraints=constraints)
    assert len(rules) >= 3
    # Every registered rule must have absorbed every global constraint.
    for rule_name in rules:
        for constraint in constraints:
            assert constraint in rules[rule_name].GLOBAL_CONSTRAINT

    asns = generate(helpers.combine_pools(pool), rules)
    assert len(asns) == n_asns
def test_level3_productname_components_discovered():
    """Discovered association: product name parses into expected components."""
    rules = registry_level3_only()
    pool = combine_pools(t_path('data/pool_002_image_miri.csv'))
    asn = generate(pool, rules)[0]

    match = re.match(LEVEL3_PRODUCT_NAME_REGEX, asn['products'][0]['name'])
    assert match is not None
    parts = match.groupdict()
    expected = {
        'program': '99009',
        'acid': 'a3001',
        'target': 't001',
        'instrument': 'miri',
        'opt_elem': 'f560w',
    }
    for component, value in expected.items():
        assert parts[component] == value
def test_nrs_fixedslit_nod():
    """Test NIRSpec Fixed-slit background nods"""
    pool = combine_pools(t_path('data/pool_024_nirspec_fss_nods.csv'))
    all_cands = constrain_on_candidates(None)
    rules = registry_level2_only(global_constraints=all_cands)
    asns = generate(pool, rules)

    assert len(asns) == 30
    for asn in asns:
        n_dithers = int(asn.constraints['nods'].value)
        n_spectral_dithers = int(asn.constraints['subpxpts'].value)
        # Expect self + all exposures not at the same primary dither
        expected_members = n_dithers - n_spectral_dithers + 1
        assert len(asn['products'][0]['members']) == expected_members
def test_nrs_fixedslit_nod():
    """Test NIRSpec Fixed-slit background nods"""
    pool = combine_pools(t_path('data/pool_024_nirspec_fss_nods.csv'))
    all_cands = constrain_on_candidates(None)
    asns = generate(pool, registry_level2_only(global_constraints=all_cands))

    assert len(asns) == 30
    for asn in asns:
        nods = int(asn.constraints['nods'].value)
        # Member count scales with the sub-pixel dither pattern multiplier.
        subpxpts = asn.constraints['subpxpts'].value
        expected = nods * DITHER_PATTERN_MULTIPLIER[subpxpts]
        assert len(asn['products'][0]['members']) == expected
def test_targacq(pool_file):
    """Test for existence of target acquisitions in associations."""
    rules = registry_level3_only()
    pool = combine_pools(t_path(pool_file))
    asns = generate(pool, rules)
    assert len(asns) > 0

    # Reprocessed science-only association rules are exempt from the check.
    exempt_rules = ("Asn_Lv3SpecAux", "Asn_Lv3NRSIFUBackground")
    for asn in asns:
        if asn['asn_rule'] in exempt_rules:
            continue
        for product in asn['products']:
            exptypes = {
                member['exptype'].lower()
                for member in product['members']
            }
            assert 'target_acquisition' in exptypes
def test_meta():
    """Check the canonical metadata of a simple Level3 image association."""
    rules = registry_level3_only()
    pool = combine_pools(t_path('data/pool_002_image_miri.csv'))
    asns = generate(pool, rules)
    assert len(asns) == 1

    data = asns[0].data
    expected = {
        'program': '99009',
        'target': 't001',
        'asn_type': 'image3',
        'asn_id': 'a3001',
        'asn_pool': 'pool_002_image_miri',
        'asn_rule': 'Asn_Lv3Image',
        'degraded_status': 'No known degraded exposures in association.',
    }
    for key, value in expected.items():
        assert data[key] == value
    assert data['version_id'] is None
    assert data['constraints'] is not None
def test_level2_asn_names(pool_params):
    """Generated Level2 association names match the expected naming patterns."""
    pool = helpers.combine_pools(helpers.t_path(pool_params))
    rules = helpers.registry_level2_only(global_constraints=all_candidates)
    asns = generate(pool, rules)
    assert len(asns) > 0

    for asn in asns:
        # Candidate-based associations use the ACID naming scheme;
        # everything else falls back to the discovered-name scheme.
        is_acid = any(
            getattr(constraint, 'is_acid', False)
            for constraint in asn.constraints
        )
        pattern = (LEVEL3_ASN_ACID_NAME_REGEX if is_acid
                   else LEVEL3_ASN_DISCOVERED_NAME_REGEX)
        assert re.match(pattern, asn.asn_name) is not None
def test_nrs_fixedslit_nod_chop():
    """Test NIRSpec Fixed-slit background nods"""
    pool = combine_pools(t_path('data/pool_025_nirspec_fss_nod_chop.csv'))
    all_cands = constrain_on_candidates(None)
    asns = generate(pool, registry_level2_only(global_constraints=all_cands))

    assert len(asns) == 8
    for asn in asns:
        assert asn['asn_rule'] in ['Asn_Lv2NRSFSS', 'Asn_Lv2SpecSpecial']
        if asn['asn_rule'] == 'Asn_Lv2SpecSpecial':
            assert len(asn['products'][0]['members']) == 1
            continue
        expected = int(asn.constraints['nods'].value)
        # Candidate-based ('c...') associations pick up one extra member.
        if asn['asn_id'].startswith('c'):
            expected += 1
        assert len(asn['products'][0]['members']) == expected
def test_exposerr():
    """Associations with exposure-error members are flagged as degraded.

    Generates associations for candidate 'o001' from a pool containing
    exposures with errors, then checks that the degraded status message
    matches whether any member carries a non-empty ``exposerr``.
    """
    pool = combine_pools(t_path('data/pool_008_exposerr.csv'))
    generated = Main([
        '--dry-run',
        '-i', 'o001',
    ], pool=pool)
    asns = generated.associations
    assert len(asns) > 1
    for asn in asns:
        # Degraded when any member of any product has a non-empty exposerr.
        # (Single generator-based any() replaces the original boolean
        # accumulator that materialized a list per product.)
        any_degraded = any(
            member['exposerr'] not in _EMPTY
            for product in asn['products']
            for member in product['members']
        )
        if any_degraded:
            assert asn['degraded_status'] == _DEGRADED_STATUS_NOTOK
        else:
            assert asn['degraded_status'] == _DEGRADED_STATUS_OK
def test_level3_productname_components_acid():
    """ACID-constrained association: product name reflects the candidate id."""
    # Constrain generation to observation candidate o001, treated as an ACID.
    global_constraints = DMSAttrConstraint(
        name='asn_candidate_ids',
        value='.+o001.+',
        sources=['asn_candidate'],
        force_unique=True,
        is_acid=True,
        evaluate=True,
    )
    rules = registry_level3_only(global_constraints=global_constraints)
    pool = combine_pools(t_path('data/pool_002_image_miri.csv'))
    asn = generate(pool, rules)[0]

    match = re.match(LEVEL3_PRODUCT_NAME_REGEX, asn['products'][0]['name'])
    assert match is not None
    parts = match.groupdict()
    expected = {
        'program': '99009',
        'acid': 'o001',
        'target': 't001',
        'instrument': 'miri',
        'opt_elem': 'f560w',
    }
    for component, value in expected.items():
        assert parts[component] == value
def test_against_standard(self, standard_pars):
    """Compare a generated association against a standard

    Success is when no other AssertionError occurs.
    """
    if standard_pars.xfail is not None:
        pytest.xfail(reason=standard_pars.xfail)

    # Generate the associations into a scratch directory.
    out_dir = Path('generate')
    out_dir.mkdir()
    version_id = standard_pars.pool_root.replace('_', '-')
    args = TEST_ARGS + standard_pars.main_args + [
        '-p', str(out_dir),
        '--version-id', version_id,
    ]
    pool = combine_pools(
        [t_path(Path('data') / (standard_pars.pool_root + '.csv'))]
    )
    Main(args, pool=pool)

    # Retrieve the truth files matching this pool's version id.
    truth_paths = [
        self.get_data(truth_path)
        for truth_path in self.data_glob(
            *self.ref_loc, glob='*_' + version_id + '_*.json')
    ]

    # Compare the sets; known issues become expected failures.
    try:
        compare_asn_files(out_dir.glob('*.json'), truth_paths)
    except AssertionError as error:
        message = str(error)
        if 'Associations do not share a common set of products' in message:
            pytest.xfail('Issue #3039')
        elif 'Associations have the following product name duplication' in message:
            pytest.xfail('Issue #3041')
        else:
            raise
def pool():
    """Retrieve the association pool used by these tests."""
    pool_path = t_path(os.path.join('data', POOL_PATH))
    return combine_pools(pool_path)
def test_item_iterable():
    """ProcessItem.to_process_items yields ProcessItem instances for a pool."""
    pool = combine_pools(t_path('data/pool_013_coron_nircam.csv'))
    for item in ProcessItem.to_process_items(pool):
        assert isinstance(item, ProcessItem)
"""test_level3_dithers: Test of WFS rules."""
from jwst.associations.tests import helpers

from jwst.associations import generate
from jwst.associations.main import constrain_on_candidates

# Generate Level3 associations at import time so both the class-based
# pool checks and the function tests below can share them.
all_candidates = constrain_on_candidates(None)
rules = helpers.registry_level3_only(global_constraints=all_candidates)
pool = helpers.combine_pools(helpers.t_path('data/pool_004_wfs.csv'))
level3_asns = generate(pool, rules)


class TestLevel3WFS(helpers.BasePoolRule):
    # Pools to exercise with the expected association/orphan counts.
    pools = [
        helpers.PoolParams(
            path=helpers.t_path('data/pool_004_wfs.csv'),
            n_asns=42,
            n_orphaned=0,
        ),
    ]

    # Only the WFS combination rule is expected to fire for this pool.
    valid_rules = [
        'Asn_Lv3WFSCMB',
    ]


def test_wfs_duplicate_product_names():
    """Test for duplicate product names"""
    # NOTE(review): the body appears truncated in this view — only the
    # `global` declaration is visible; the actual checks presumably follow.
    global level3_asns
def generate_from_pool(pool_path):
    """Generate associations from pools"""
    registry = registry_level2_only()
    return generate(combine_pools(t_path(pool_path)), registry)