def test_nrs_msa_nod():
    """NIRSpec MSA 3-nod pool yields 12 Level2 associations of 3 members each."""
    nod_pool = combine_pools(t_path('data/pool_023_nirspec_msa_3nod.csv'))
    constraint = constrain_on_candidates(None)
    associations = generate(
        nod_pool, registry_level2_only(global_constraints=constraint)
    )
    assert len(associations) == 12
    for association in associations:
        assert len(association['products'][0]['members']) == 3
def full_pool_rules(request):
    """Setup to use the full example pool and registry"""
    fname = t_path('data/mega_pool.csv')
    # Return the loaded pool, a default registry, and the path it came from.
    return AssociationPool.read(fname), AssociationRegistry(), fname
def miri_params(request):
    """Fixture: MIRI spec associations for the requested candidate id.

    ``request.param`` supplies ``(cid, asn_type, asn_name, product_name)``;
    the generated associations are returned alongside the expectations.
    """
    cid, asn_type, asn_name, product_name = request.param
    spec_pool = combine_pools(t_path('data/pool_007_spec_miri.csv'))
    registry = registry_level3_only(
        global_constraints=constrain_on_candidates((cid, ))
    )
    return generate(spec_pool, registry), asn_type, asn_name, product_name
def test_against_standard(self, standard_pars):
    """Compare a generated association against a standard

    Success is when no other AssertionError occurs.
    """
    if standard_pars.xfail is not None:
        pytest.xfail(reason=standard_pars.xfail)

    # Generate the associations into a scratch folder.
    out_dir = Path('generate')
    out_dir.mkdir()
    version_id = standard_pars.pool_root.replace('_', '-')
    cli_args = standard_pars.main_args + [
        '-p', str(out_dir),
        '--version-id', version_id,
    ]
    input_pool = combine_pools(
        [t_path(Path('data') / (standard_pars.pool_root + '.csv'))]
    )
    Main(cli_args, pool=input_pool)

    # Retrieve the truth files matching this version id.
    truth_paths = [
        self.get_data(truth_path)
        for truth_path in self.data_glob(
            *self.ref_loc, glob='*_' + version_id + '_*.json'
        )
    ]

    # Compare; an expected failure is translated to xfail.
    try:
        compare_asn_files(out_dir.glob('*.json'), truth_paths)
    except AssertionError:
        if standard_pars.xfail:
            pytest.xfail(standard_pars.xfail)
        else:
            raise
def test_multi_rules():
    """Rules from two definition files coexist; base classes are excluded."""
    rule_files = [
        helpers.t_path('data/asn_rules_set1.py'),
        helpers.t_path('data/asn_rules_set2.py'),
    ]
    registry = AssociationRegistry(rule_files, include_default=False)
    assert len(registry) == 4
    names = helpers.get_rule_names(registry)
    # Base classes must not register as usable rules.
    assert 'DMS_Level3_Base_Set1' not in names
    assert 'DMS_Level3_Base_Set2' not in names
    # Every concrete rule from both files must be present.
    for expected in (
        'Asn_Dither_Set1', 'Asn_Dither_Set2', 'Asn_WFS_Set1', 'Asn_WFS_Set2'
    ):
        assert expected in names
def test_item():
    """ProcessItem equality and hashing track the underlying pool row."""
    pool = combine_pools(t_path('data/pool_013_coron_nircam.csv'))
    first = ProcessItem(pool[0])
    second = ProcessItem(pool[1])
    assert first == first
    assert first != second
    # Distinct items hash distinctly, so both survive in a set.
    assert len({first, second}) == 2
def test_read_assoc_defs():
    """Rules read from a definition file register concrete rules only."""
    registry = AssociationRegistry(
        [helpers.t_path('data/asn_rules_set1.py')],
        include_default=False
    )
    assert len(registry) >= 2
    names = helpers.get_rule_names(registry)
    # The base class should not appear in the registry itself.
    assert 'DMS_Level3_Base_Set1' not in registry
    for expected in ('Asn_Dither_Set1', 'Asn_WFS_Set1'):
        assert expected in names
def test_valid():
    """A known-good association validates against the registry schemas."""
    registry = AssociationRegistry()
    with open(helpers.t_path('data/test_image_asn.json'), 'r') as asn_fp:
        asn = load_asn(asn_fp)
    matched_schemas = registry.validate(asn)
    assert isinstance(matched_schemas, list)
def test_include_bases():
    """Test for included bases"""
    rules_path = t_path(path.join('data', 'dms_rules.py'))
    registry = AssociationRegistry(
        [rules_path], include_default=False, include_bases=True
    )
    # With include_bases=True the base classes appear alongside the rules.
    assert len(registry) > 1
    assert 'DMSBaseMixin' in registry
def test_level2_asn_names_with_version(pool_params):
    """Versioned association names match the versioned-name pattern."""
    pool = helpers.combine_pools(helpers.t_path(pool_params))
    registry = helpers.registry_level2_only(global_constraints=all_candidates)
    asns = generate(pool, registry, version_id=True)
    assert len(asns) > 0
    for asn in asns:
        assert re.match(LEVEL3_ASN_WITH_VERSION, asn.asn_name) is not None
def test_simple():
    """Test generate on simple registry"""
    registry = AssociationRegistry(
        [t_path('data/rules_basic.py')], include_default=False
    )
    pool = AssociationPool()
    pool['value'] = ['row1', 'row2']
    results = generate(pool, registry)
    # Both rows collapse into a single two-member association.
    assert len(results) == 1
    assert len(results[0]['members']) == 2
class TestLevel3Spec(BasePoolRule):
    # Spectroscopic pools with the expected association and orphan counts
    # each should produce under the Level3 rules.
    pools = [
        PoolParams(path=t_path('data/pool_005_spec_niriss.csv'), n_asns=1, n_orphaned=0),
        PoolParams(path=t_path('data/pool_006_spec_nirspec.csv'), n_asns=3, n_orphaned=0),
        PoolParams(path=t_path('data/pool_007_spec_miri.csv'), n_asns=2, n_orphaned=0),
        PoolParams(path=t_path('data/pool_019_niriss_wfss.csv'), n_asns=2, n_orphaned=0),
    ]
    # NOTE(review): only the image rule is listed for these spectroscopic
    # pools — confirm this is intended and not a copy-paste from the
    # image test class.
    valid_rules = [
        'Asn_Lv3Image',
    ]
class TestLevel3WFS(helpers.BasePoolRule):
    # Wavefront-sensing pool: expect 42 associations and no orphans.
    pools = [
        helpers.PoolParams(path=helpers.t_path('data/pool_004_wfs.csv'), n_asns=42, n_orphaned=0),
    ]
    # Only the WFS combine rule should fire for this pool.
    valid_rules = [
        'Asn_Lv3WFSCMB',
    ]
def test_candidate_observation(partial_args, n_asns):
    """Candidate-selection CLI args yield the expected association count."""
    with mkstemp_pool_file(
            t_path('data/pool_001_candidates.csv')) as pool_path:
        args = [
            pool_path,
            '--dry-run',
            '-r', level3_rule_path(),
            '--ignore-default',
        ] + list(partial_args)
        result = Main(args)
        assert len(result.associations) == n_asns
def test_level3_productname_components_discovered():
    """Discovered Level3 product names decompose into the expected parts."""
    pool = combine_pools(t_path('data/pool_002_image_miri.csv'))
    asns = generate(pool, registry_level3_only())
    product_name = asns[0]['products'][0]['name']
    match = re.match(LEVEL3_PRODUCT_NAME_REGEX, product_name)
    assert match is not None
    parts = match.groupdict()
    # Each named group carries the expected pool-derived value.
    assert parts['program'] == '99009'
    assert parts['acid'] == 'a3001'
    assert parts['target'] == 't001'
    assert parts['instrument'] == 'miri'
    assert parts['opt_elem'] == 'f560w'
def test_nrs_fixedslit_nod():
    """Test NIRSpec Fixed-slit background nods"""
    pool = combine_pools(t_path('data/pool_024_nirspec_fss_nods.csv'))
    registry = registry_level2_only(
        global_constraints=constrain_on_candidates(None)
    )
    asns = generate(pool, registry)
    assert len(asns) == 30
    for asn in asns:
        total_nods = int(asn.constraints['nods'].value)
        subpx_points = int(asn.constraints['subpxpts'].value)
        # Expect self + all exposures not at the same primary dither
        expected = total_nods - subpx_points + 1
        assert len(asn['products'][0]['members']) == expected
def test_candidate_observation_caseagnostic(partial_args, n_asns):
    """Use the extensive candidate test as a test for case"""
    # Same as the candidate test, but run against a lower-cased pool.
    with mkstemp_pool_file(
            t_path('data/pool_001_candidates_lower.csv')) as pool_path:
        args = [
            pool_path,
            '--dry-run',
            '-r', level3_rule_path(),
            '--ignore-default',
        ] + list(partial_args)
        result = Main(args)
        assert len(result.associations) == n_asns
class TestLevel3Image(BasePoolRule):
    # Imaging pools with the expected association and orphan counts each
    # should produce under the Level3 rules.
    pools = [
        PoolParams(path=t_path('data/pool_002_image_miri.csv'), n_asns=1, n_orphaned=0),
        PoolParams(path=t_path('data/pool_003_image_nircam.csv'), n_asns=2, n_orphaned=0),
        # Below tested cannot be run due to an obscure numpy.ma bug.
        #PoolParams(
        #    path=[
        #        t_path('data/pool_002_image_miri.csv'),
        #        t_path('data/pool_003_image_nircam.csv'),
        #    ],
        #    n_asns=3,
        #    n_orphaned=0
        #),
    ]
    # Only the image rule should fire for these pools.
    valid_rules = [
        'Asn_Lv3Image',
    ]
def test_nrs_fixedslit_nod():
    """Test NIRSpec Fixed-slit background nods"""
    # NOTE(review): this shares its name with another test_nrs_fixedslit_nod
    # in this file; within one module the later definition shadows the
    # earlier — confirm these belong to different test modules.
    pool = combine_pools(t_path('data/pool_024_nirspec_fss_nods.csv'))
    registry = registry_level2_only(
        global_constraints=constrain_on_candidates(None)
    )
    asns = generate(pool, registry)
    assert len(asns) == 30
    for asn in asns:
        nod_count = int(asn.constraints['nods'].value)
        pattern_key = asn.constraints['subpxpts'].value
        # Member count scales with nods by the dither-pattern multiplier.
        expected = nod_count * DITHER_PATTERN_MULTIPLIER[pattern_key]
        assert len(asn['products'][0]['members']) == expected
def test_duplicate_names():
    """
    For Level 3 association, there should be no association
    with the same product name. Generation should produce
    log messages indicating when duplicate names have been found.
    """
    dup_pool = AssociationPool.read(t_path('data/jw00632_dups.csv'))
    registry = registry_level3_only(
        global_constraints=constrain_on_candidates(None)
    )
    # Duplicate product names are reported as RuntimeWarnings.
    with pytest.warns(RuntimeWarning):
        asns = generate(dup_pool, registry)
    # There should only be one association left.
    assert len(asns) == 1
def test_targacq(pool_file):
    """Test for existence of target acquisitions in associations"""
    pool = combine_pools(t_path(pool_file))
    asns = generate(pool, registry_level3_only())
    assert len(asns) > 0
    # Reprocessed science-only rules are exempt from the check.
    science_only_rules = ("Asn_Lv3SpecAux", "Asn_Lv3NRSIFUBackground")
    for asn in asns:
        if asn['asn_rule'] in science_only_rules:
            continue
        for product in asn['products']:
            exptypes = {
                member['exptype'].lower() for member in product['members']
            }
            assert 'target_acquisition' in exptypes
def test_level2_asn_names(pool_params):
    """Association names match the ACID or discovered naming pattern."""
    pool = helpers.combine_pools(helpers.t_path(pool_params))
    registry = helpers.registry_level2_only(global_constraints=all_candidates)
    asns = generate(pool, registry)
    assert len(asns) > 0
    for asn in asns:
        # ACID-constrained associations use the ACID name pattern;
        # everything else uses the discovered pattern.
        is_acid = any(
            getattr(constraint, 'is_acid', False)
            for constraint in asn.constraints
        )
        pattern = (
            LEVEL3_ASN_ACID_NAME_REGEX if is_acid
            else LEVEL3_ASN_DISCOVERED_NAME_REGEX
        )
        assert re.match(pattern, asn.asn_name) is not None
def test_duplicate_main():
    """Test the same but with Main
    """
    args = [
        t_path('data/pool_duplicate.csv'),
        '--dry-run',
        '-r', level3_rule_path(),
        '--ignore-default',
    ]
    asns = Main(args).associations
    assert len(asns) == 2
    # Both associations are image3; their ids are the candidate and the obs.
    types = {asn['asn_type'] for asn in asns}
    assert len(types) == 1
    assert 'image3' in types
    assert {asn['asn_id'] for asn in asns} == {'a3001', 'o029'}
def test_meta():
    """Generated association metadata carries the expected values."""
    pool = combine_pools(t_path('data/pool_002_image_miri.csv'))
    asns = generate(pool, registry_level3_only())
    assert len(asns) == 1
    meta = asns[0].data
    expected = {
        'program': '99009',
        'target': 't001',
        'asn_type': 'image3',
        'asn_id': 'a3001',
        'asn_pool': 'pool_002_image_miri',
        'asn_rule': 'Asn_Lv3Image',
        'degraded_status': 'No known degraded exposures in association.',
    }
    for key, value in expected.items():
        assert meta[key] == value
    assert meta['version_id'] is None
    assert meta['constraints'] is not None
def test_nrs_fixedslit_nod_chop():
    """Test NIRSpec Fixed-slit background nods"""
    pool = combine_pools(t_path('data/pool_025_nirspec_fss_nod_chop.csv'))
    registry = registry_level2_only(
        global_constraints=constrain_on_candidates(None)
    )
    asns = generate(pool, registry)
    assert len(asns) == 8
    for asn in asns:
        rule = asn['asn_rule']
        assert rule in ('Asn_Lv2NRSFSS', 'Asn_Lv2SpecSpecial')
        members = asn['products'][0]['members']
        if rule == 'Asn_Lv2SpecSpecial':
            # Special associations carry only the science exposure.
            assert len(members) == 1
        else:
            expected = int(asn.constraints['nods'].value)
            # Candidate-type ('c...') associations gain one extra member.
            if asn['asn_id'].startswith('c'):
                expected += 1
            assert len(members) == expected
def cmd_from_pool(pool_path, args):
    """Run commandline on pool

    Parameters
    ----------
    pool_path : str
        The pool to run on.

    args : [arg(, ...)]
        Additional command line arguments in the form `sys.argv`

    Returns
    -------
    Main
        The executed `Main` instance holding the generated associations.
    """
    # Always dry-run with the Level2b-only rules; caller args are appended.
    full_args = [
        '--dry-run',
        '-D',
        '-r', t_path('../lib/rules_level2b.py'),
        '--ignore-default'
    ]
    full_args.extend(args)
    result = Main(full_args, pool=pool_path)
    return result
def test_exposerr():
    """Degraded status reflects any member with a non-empty exposerr."""
    pool = combine_pools(t_path('data/pool_008_exposerr.csv'))
    result = Main(['--dry-run', '-i', 'o001'], pool=pool)
    asns = result.associations
    assert len(asns) > 1
    for asn in asns:
        # Any member across any product with a non-empty exposerr
        # marks the whole association as degraded.
        degraded = any(
            member['exposerr'] not in _EMPTY
            for product in asn['products']
            for member in product['members']
        )
        expected_status = (
            _DEGRADED_STATUS_NOTOK if degraded else _DEGRADED_STATUS_OK
        )
        assert asn['degraded_status'] == expected_status
def test_niriss_wfss():
    """Test association properties for NIRISS WFSS"""
    pool = AssociationPool.read(
        t_path(path.join('data', 'jw87800_20180412T163456_pool.csv'))
    )
    results = Main(['--dry-run', '--D'], pool=pool)
    asns = results.associations

    # Expect 12 associations covering all required types
    # (image2, spec2, image3, spec3).
    assert len(asns) == 12
    assert REQUIRED_ASN_TYPES == {asn['asn_type'] for asn in asns}

    # Arrange associations by type
    by_type = {asn['asn_type']: asn for asn in asns}

    # The spec2 members must reference the image3 catalog and
    # segmentation-map product names.
    l3name = by_type['image3']['products'][0]['name']
    expected_cat = l3name + '_cat.ecsv'
    expected_segmap = l3name + '_segm.fits'
    for product in by_type['spec2']['products']:
        members = {
            member['exptype']: member for member in product['members']
        }
        assert members['sourcecat']['expname'] == expected_cat
        assert members['segmap']['expname'] == expected_segmap
def test_level3_productname_components_acid():
    """ACID-constrained Level3 product names carry the candidate id."""
    acid_constraint = DMSAttrConstraint(
        name='asn_candidate_ids',
        value='.+o001.+',
        sources=['asn_candidate'],
        force_unique=True,
        is_acid=True,
        evaluate=True,
    )
    pool = combine_pools(t_path('data/pool_002_image_miri.csv'))
    asns = generate(
        pool, registry_level3_only(global_constraints=acid_constraint)
    )
    match = re.match(LEVEL3_PRODUCT_NAME_REGEX, asns[0]['products'][0]['name'])
    assert match is not None
    parts = match.groupdict()
    # The acid component now reflects the observation candidate.
    for field, expected in (
        ('program', '99009'),
        ('acid', 'o001'),
        ('target', 't001'),
        ('instrument', 'miri'),
        ('opt_elem', 'f560w'),
    ):
        assert parts[field] == expected
def test_duplicate_generate():
    """Test for duplicate/overwrite association

    The pool has two exposures, one without a valid `asn_candidate`
    and one with a valid observation `asn_candidate`. Under the
    "all candidates" constraint exactly one association should result.

    The prompt for this test was that three associations were being
    created: two observation-candidate duplicates plus an extraneous
    discovered candidate.
    """
    dup_pool = AssociationPool.read(t_path('data/pool_duplicate.csv'))
    registry = registry_level3_only(
        global_constraints=constrain_on_candidates(None)
    )
    asns = generate(dup_pool, registry)
    assert len(asns) == 1
    only = asns[0]
    assert only['asn_type'] == 'image3'
    assert only['asn_id'] == 'o029'
def test_against_standard(self, standard_pars):
    """Compare a generated association against a standard

    Success is when no other AssertionError occurs.
    """
    if standard_pars.xfail is not None:
        pytest.xfail(reason=standard_pars.xfail)

    # Create the associations
    generated_path = Path('generate')
    generated_path.mkdir()
    version_id = standard_pars.pool_root.replace('_', '-')
    args = TEST_ARGS + standard_pars.main_args + [
        '-p', str(generated_path),
        '--version-id', version_id,
    ]
    pool = combine_pools([
        t_path(Path('data') / (standard_pars.pool_root + '.csv'))
    ])
    Main(args, pool=pool)

    # Retrieve the truth files
    truth_paths = [
        self.get_data(truth_path)
        for truth_path in self.data_glob(*self.ref_loc, glob='*_' + version_id + '_*.json')
    ]

    # Compare the association sets.
    # Known comparison failures are mapped to their tracking issues
    # and marked xfail; anything else is a real failure.
    try:
        compare_asn_files(generated_path.glob('*.json'), truth_paths)
    except AssertionError as error:
        if 'Associations do not share a common set of products' in str(error):
            pytest.xfail('Issue #3039')
        elif 'Associations have the following product name duplication' in str(error):
            pytest.xfail('Issue #3041')
        else:
            raise
)
from jwst.tests.base_classes import BaseJWSTTest
from jwst.associations.main import Main

# #################
# Setup environment
# #################

# Main test args
TEST_ARGS = ['--no-merge']

# Produce Level2b only associations
LV2_ONLY_ARGS = [
    '-r', t_path('../lib/rules_level2b.py'),
    '--ignore-default',
    '--no-merge'
]

# Produce Level3 only associations
LV3_ONLY_ARGS = [
    '-r', t_path('../lib/rules_level3.py'),
    '--ignore-default',
    '--no-merge'
]

# Produce general associations
DEF_ARGS = []