def test_roundtrip(make_asns):
    """Verify generated associations serialize to disk and load back cleanly."""
    generated, path, asn_format = make_asns

    # One serialized file should exist per generated association.
    serialized = glob(os.path.join(path, '*.' + asn_format))
    assert len(serialized) == len(generated.associations)

    # Each serialized association must be re-loadable.
    for fname in serialized:
        with open(fname, 'r') as fh:
            load_asn(fh)

    # Exactly one orphaned-exposure table should have been written.
    csv_files = glob(os.path.join(path, '*.csv'))
    assert len(csv_files) == 1
    table = Table.read(csv_files[0], format='ascii', delimiter='|')
    assert len(table) == len(generated.orphaned)
def test_save_source_only(_bigdata):
    """Test saving the source-based files only.

    Runs Spec3Pipeline with every optional step skipped and verifies that
    the single expected source-based product is the only file produced.
    """
    datapath = path.join(_bigdata, 'nirspec', 'test_datasets', 'fss', '93045', 'level2b')

    # Skip every step so only the source-based save occurs.
    pipe = Spec3Pipeline()
    pipe.mrs_imatch.skip = True
    pipe.outlier_detection.skip = True
    pipe.resample_spec.skip = True
    pipe.cube_build.skip = True
    pipe.extract_1d.skip = True

    asn_path = path.join(datapath, 'jw93045-o010_20180725t035735_spec3_001_asn.json')
    pipe.run(asn_path)

    # Check resulting product
    with open(asn_path) as fh:
        asn = load_asn(fh)
    base_name = asn['products'][0]['name']
    product_name = base_name.format(source_id='ss400a1') + '_cal.fits'

    # The expected product must be present, and be the only new file.
    # (Direct membership assert replaces the old `else: assert False`,
    # which produced an uninformative failure.)
    output_files = glob('*')
    assert product_name in output_files
    output_files.remove(product_name)
    assert len(output_files) == 0
def raw_from_asn(asn_file):
    """
    Return a list of all input files from a given association.

    Parameters
    ----------
    asn_file : str
        Filename for the ASN file.

    Returns
    -------
    members : list of str
        A list of all input files in the association
    """
    with open(asn_file) as f:
        asn = load_asn(f)

    # Flatten the expname of every member across all products.
    return [
        member['expname']
        for product in asn['products']
        for member in product['members']
    ]
def test_run_msaflagging(self, caplog):
    """Test msa flagging operation"""
    # Retrieve the configuration files, the MSA metadata file, and the
    # association itself.
    collect_pipeline_cfgs('cfgs')
    self.get_data(*self.test_dir, 'jw95065006001_0_msa_twoslit.fits')
    asn_path = self.get_data(*self.test_dir, 'mos_udf_g235m_twoslit_spec2_asn.json')

    # Pull down every member exposure listed in the association.
    with open(asn_path) as fp:
        asn = load_asn(fp)
    for product in asn['products']:
        for member in product['members']:
            self.get_data(*self.test_dir, 'level2a_twoslit', member['expname'])

    # Run the pipeline with msa_flagging explicitly enabled.
    cmdline = [
        op.join('cfgs', 'calwebb_spec2.cfg'),
        asn_path,
        '--steps.msa_flagging.skip=false'
    ]
    Step.from_cmdline(cmdline)

    # The step must have started and finished...
    assert 'Step msa_flagging running with args' in caplog.text
    assert 'Step msa_flagging done' in caplog.text

    # ...and produced a calibrated product for every association product.
    for product in asn['products']:
        assert op.isfile(product['name'] + '_cal.fits')
def test_save_source_only(self):
    """Test saving the source-based files only.

    Runs Spec3Pipeline with every optional step skipped and verifies the
    expected source-based product appears in the working directory.
    """
    datapath = ['test_datasets', 'fss', '93045', 'level2b']
    asn_file = self.get_data(
        *datapath, 'jw93045-o010_20180725t035735_spec3_001_asn.json')
    for file in raw_from_asn(asn_file):
        self.get_data(*datapath, file)

    # Skip every step so only the source-based save occurs.
    pipe = Spec3Pipeline()
    pipe.mrs_imatch.skip = True
    pipe.outlier_detection.skip = True
    pipe.resample_spec.skip = True
    pipe.cube_build.skip = True
    pipe.extract_1d.skip = True

    pipe.run(asn_file)

    # Check resulting product
    with open(asn_file) as fh:
        asn = load_asn(fh)
    base_name = asn['products'][0]['name']
    product_name = base_name.format(source_id='s00000') + '_cal.fits'

    # Direct membership assert replaces the old
    # `if ...: remove else: assert False`, which both mutated a dead list
    # and failed with no diagnostic.
    assert product_name in glob('*')
def test_run_msaflagging(self, caplog):
    """Test msa flagging operation"""
    # Fetch configs, the MSA metadata file, and the association.
    collect_pipeline_cfgs('cfgs')
    self.get_data(*self.test_dir, 'jw95065006001_0_msa_twoslit.fits')
    asn_path = self.get_data(*self.test_dir, 'mos_udf_g235m_twoslit_spec2_asn.json')

    # Fetch each member exposure named by the association.
    with open(asn_path) as fp:
        asn = load_asn(fp)
    members = [
        member['expname']
        for product in asn['products']
        for member in product['members']
    ]
    for expname in members:
        self.get_data(*self.test_dir, 'level2a_twoslit', expname)

    # Run the pipeline with the msa_flagging step forced on.
    Step.from_cmdline([
        op.join('cfgs', 'calwebb_spec2.cfg'),
        asn_path,
        '--steps.msa_flagging.skip=false'
    ])

    # Verify the step ran to completion and wrote each product.
    assert 'Step msa_flagging running with args' in caplog.text
    assert 'Step msa_flagging done' in caplog.text
    for product in asn['products']:
        assert op.isfile(product['name'] + '_cal.fits')
def test_save_source_only(self):
    """Test saving the source-based files only.

    Runs Spec3Pipeline with every optional step skipped and verifies the
    expected source-based product appears in the working directory.
    """
    datapath = ['test_datasets', 'fss', '93045', 'level2b']
    asn_file = self.get_data(*datapath,
                             'jw93045-o010_20180725t035735_spec3_001_asn.json')
    for file in raw_from_asn(asn_file):
        self.get_data(*datapath, file)

    # Skip every step so only the source-based save occurs.
    pipe = Spec3Pipeline()
    pipe.mrs_imatch.skip = True
    pipe.outlier_detection.skip = True
    pipe.resample_spec.skip = True
    pipe.cube_build.skip = True
    pipe.extract_1d.skip = True

    pipe.run(asn_file)

    # Check resulting product
    with open(asn_file) as fh:
        asn = load_asn(fh)
    base_name = asn['products'][0]['name']
    product_name = base_name.format(source_id='ss400a1') + '_cal.fits'

    # Direct membership assert replaces the old
    # `if ...: remove else: assert False`, which both mutated a dead list
    # and failed with no diagnostic.
    assert product_name in glob('*')
def test_roundtrip(make_asns):
    """Round-trip check: written associations reload; orphans are tabulated."""
    generated, path, asn_format = make_asns

    # Count the serialized association files against what was generated.
    asn_paths = glob(os.path.join(path, '*.' + asn_format))
    assert len(asn_paths) == len(generated.associations)

    # Re-load each one to prove it parses.
    for asn_path in asn_paths:
        with open(asn_path, 'r') as fh:
            load_asn(fh)

    # A single CSV of orphaned exposures should exist, with one row per
    # orphan.
    orphan_csvs = glob(os.path.join(path, '*.csv'))
    assert len(orphan_csvs) == 1
    orphan_table = Table.read(
        orphan_csvs[0], format='ascii', delimiter='|'
    )
    assert len(orphan_table) == len(generated.orphaned)
def test_load_asn_all(make_asns):
    """Loading with first=False must return every matching association."""
    generated, path, asn_format = make_asns

    asn_paths = glob(os.path.join(path, '*.' + asn_format))
    assert len(asn_paths) == len(generated.associations)

    for asn_path in asn_paths:
        with open(asn_path, 'r') as fh:
            # first=False returns all rule matches, not just the first.
            loaded = load_asn(fh, registry=generated.rules, first=False)
            assert len(loaded) > 1
def test_base_roundtrip():
    """Write/read created base association"""
    member_list = ['a', 'b', 'c']
    original = asn_from_list(member_list, rule=Association)

    # Serialize, then reload without a registry.
    _, serialized = original.dump()
    reloaded = load_asn(serialized, registry=None)

    # Rule, type, and membership must survive the round trip.
    for key in ('asn_rule', 'asn_type', 'members'):
        assert original[key] == reloaded[key]
def test_valid():
    """A known-good association must validate against the registry schemas."""
    registry = AssociationRegistry()
    asn_path = helpers.t_path(
        'data/test_image_asn.json'
    )
    with open(asn_path, 'r') as fh:
        asn = load_asn(fh)

    # validate() returns the list of schemas the association satisfied.
    matched_schemas = registry.validate(asn)
    assert isinstance(matched_schemas, list)
def test_serialize(full_pool_rules):
    """Test serializing roundtripping.

    Every generated association must dump and reload in every format
    registered in its I/O registry.
    """
    pool, rules, pool_fname = full_pool_rules

    asns = generate(pool, rules)
    for asn in asns:
        # 'fmt' avoids shadowing the builtin 'format'.
        for fmt in asn.ioregistry:
            fname, serialized = asn.dump(format=fmt)
            assert serialized is not None
            recovered = load_asn(serialized)
            assert recovered is not None
def test_generate(full_pool_rules):
    """Run a full sized pool using all rules"""
    pool, rules, pool_fname = full_pool_rules

    asns = generate(pool, rules)
    # The full pool is expected to yield exactly 95 associations.
    assert len(asns) == 95

    # Each association must serialize, reload, and validate against at
    # least one schema.
    for asn in asns:
        _, serialized = asn.dump()
        reloaded = load_asn(serialized)
        assert len(rules.validate(reloaded)) > 0
def test_generate(full_pool_rules):
    """Run a full sized pool using all rules"""
    pool, rules, pool_fname = full_pool_rules

    asns = generate(pool, rules)
    # This pool is expected to yield exactly 35 associations.
    assert len(asns) == 35

    # Every association must round-trip through dump/load and validate
    # against at least one schema.
    for asn in asns:
        _, serialized = asn.dump()
        reloaded = load_asn(serialized)
        assert len(rules.validate(reloaded)) > 0
def test_cmdline_change_rules(tmpdir):
    """Command line change the rule"""
    rule = 'Association'
    out_path = tmpdir.join('test_asn.json')
    inlist = ['a', 'b', 'c']

    # Generate the association via the command-line entry point.
    Main(['-o', out_path.strpath, '-r', rule] + inlist)

    # Reload with base rules included so the plain Association rule
    # resolves, and confirm membership is unchanged.
    with open(out_path.strpath, 'r') as fh:
        asn = load_asn(fh, registry=AssociationRegistry(include_bases=True))
    assert inlist == asn['members']
def test_image2():
    """Run Image2Pipeline on downloaded data and compare to the reference."""
    with open("data/asn_subtract_bg_flat.json") as fp:
        asn = load_asn(fp)

    # Point the association members at freshly downloaded inputs.
    science_file = get_data_from_url("17903858")
    background_file = get_data_from_url("17903855")
    asn["products"][0]["members"][0]["expname"] = science_file
    asn["products"][0]["members"][1]["expname"] = background_file
    with open("test_asn.json", "w") as out_asn:
        json.dump(asn, out_asn)

    # Run the pipeline on the rewritten association.
    iris_pipeline.pipeline.Image2Pipeline.call(
        "test_asn.json", config_file="data/image2_iris.cfg")

    # The calibrated output must match the stored reference data.
    reference_file = get_data_from_url("17905553")
    with iris_pipeline.datamodels.IRISImageModel("test_iris_subtract_bg_flat_cal.fits") as out, \
            iris_pipeline.datamodels.IRISImageModel(reference_file) as ref:
        np.testing.assert_allclose(out.data, ref.data, rtol=1e-6)
def get_asn(self, path=None, docopy=True, get_members=True):
    """Copy association and association members from Artifactory remote
    resource to the CWD/truth.

    Updates self.input and self.input_remote upon completion

    Parameters
    ----------
    path: str
        The remote path

    docopy : bool
        Switch to control whether or not to copy a file into the test
        output directory when running the test. If you wish to open
        the file directly from remote location or just to set path
        to source, set this to `False`. Default: `True`

    get_members: bool
        If an association is the input, retrieve the members.
        Otherwise, do not.
    """
    # Keep the remote path and instance state in sync.
    if path is None:
        path = self.input_remote
    else:
        self.input_remote = path
    if docopy is None:
        docopy = self.docopy

    # Retrieve and parse the association JSON file.
    self.input = get_bigdata(self._inputs_root, self._env, path, docopy=docopy)
    with open(self.input) as fp:
        self.asn = load_asn(fp)

    # Optionally retrieve every member exposure alongside it.
    if get_members:
        remote_dir = os.path.dirname(self.input_remote)
        for product in self.asn['products']:
            for member in product['members']:
                member_path = os.path.join(remote_dir, member['expname'])
                get_bigdata(self._inputs_root, self._env, member_path,
                            docopy=self.docopy)
def test_default_roundtrip():
    """Create/Write/Read a Level3 association"""
    product_name = 'test_product'
    items = {
        'a': 'science',
        'b': 'target_acq',
        'c': 'somethingelse'
    }

    # Build the association from (item, exptype) pairs.
    asn = asn_from_list(
        list(items.items()),
        product_name=product_name,
        with_exptype=True
    )

    # Serialize then reload.
    _, serialized = asn.dump()
    reloaded = load_asn(serialized)

    # Rule, type, and product count must survive the round trip.
    assert asn['asn_rule'] == reloaded['asn_rule']
    assert asn['asn_type'] == reloaded['asn_type']
    assert len(asn['products']) == len(reloaded['products'])
def run_spec2(jail, rtdata_module):
    """Run the Spec2Pipeline on a single exposure"""
    rtdata = rtdata_module

    # Retrieve the association and derive the rate-file input path from
    # its first member's exposure name.
    asn_name = 'ifushort_ch12_rate_asn3.json'
    rtdata.get_data(INPUT_PATH + '/' + asn_name)
    asn_path = rtdata.input
    with open(asn_path, 'r') as asn_fh:
        asn = load_asn(asn_fh)
    member_path = Path(asn['products'][0]['members'][0]['expname'])
    rate_stem = replace_suffix(member_path.stem, 'rate')
    rate_path = INPUT_PATH + '/' + rate_stem + member_path.suffix

    # Run the pipeline, saving the result of every individual step.
    step_params = {
        'input_path': rate_path,
        'step': 'calwebb_spec2.cfg',
        'args': [
            '--steps.bkg_subtract.save_results=true',
            '--steps.assign_wcs.save_results=true',
            '--steps.imprint_subtract.save_results=true',
            '--steps.msa_flagging.save_results=true',
            '--steps.extract_2d.save_results=true',
            '--steps.flat_field.save_results=true',
            '--steps.srctype.save_results=true',
            '--steps.straylight.save_results=true',
            '--steps.fringe.save_results=true',
            '--steps.pathloss.save_results=true',
            '--steps.barshadow.save_results=true',
            '--steps.photom.save_results=true',
            '--steps.resample_spec.save_results=true',
            '--steps.cube_build.save_results=true',
            '--steps.extract_1d.save_results=true',
        ]
    }
    rtdata = rt.run_step_from_dict(rtdata, **step_params)
    return rtdata, asn_path
def test_cmdline_success(format, tmpdir):
    """Create Level3 associations in different formats"""
    out_path = tmpdir.join('test_asn.json')
    product_name = 'test_product'
    inlist = ['a', 'b', 'c']

    # Generate via the command-line entry point in the requested format.
    Main([
        '-o', out_path.strpath,
        '--product-name', product_name,
        '--format', format
    ] + inlist)

    with open(out_path.strpath, 'r') as fh:
        asn = load_asn(fh, format=format)

    # A single product, named as requested, whose members are exactly
    # the input items.
    assert len(asn['products']) == 1
    product = asn['products'][0]
    assert product['name'] == product_name
    assert inlist == [member['expname'] for member in product['members']]
def test_asn_naming(self):
    """Test a full run"""
    # Retrieve configuration, the association, and its member exposures.
    collect_pipeline_cfgs('cfgs')
    asn_path = self.get_data(self.test_dir, 'wfs_3sets_asn.json')
    with open(asn_path) as fh:
        asn = load_asn(fh)
    for product in asn['products']:
        for member in product['members']:
            self.get_data(self.test_dir, member['expname'])
    input_files = glob('*')

    # Run the pipeline.
    Step.from_cmdline([
        op.join('cfgs', 'calwebb_wfs-image3.cfg'),
        asn_path
    ])

    # Strip the inputs out of the directory listing, leaving only new
    # files.
    output_files = glob('*')
    for input_file in input_files:
        output_files.remove(input_file)
    print('output_files = {}'.format(output_files))

    # Every product should have a correspondingly named 'wfscmb' file.
    for product in asn['products']:
        prod_name = format_product(product['name'], suffix='wfscmb') + '.fits'
        assert prod_name in output_files
        output_files.remove(prod_name)

    # There should be no more files
    assert len(output_files) == 0
def test_image2_subarray(tmp_path):
    """Run the imager L2 pipeline on a subarray cut from full-frame data."""
    with open("iris_pipeline/tests/data/asn_subtract_bg_flat.json") as fp:
        asn = load_asn(fp)

    raw_science_filename = get_data_from_url("17903858")
    input_model = iris_pipeline.datamodels.IRISImageModel(raw_science_filename)

    # Carve a CUSTOM subarray out of the full-frame science exposure.
    xstart, ystart = 100, 200
    xsize, ysize = 50, 60
    input_model.meta.subarray.name = "CUSTOM"
    input_model.meta.subarray.xstart = xstart + 1  # FITS is 1-indexed
    input_model.meta.subarray.ystart = ystart + 1
    input_model.meta.subarray.xsize = xsize
    input_model.meta.subarray.ysize = ysize

    subarray_slice = np.s_[ystart:ystart + ysize, xstart:xstart + xsize]
    for attr in ('data', 'dq', 'err'):
        setattr(input_model, attr,
                np.array(getattr(input_model, attr)[subarray_slice]))

    raw_science_subarray_filename = tmp_path / "temp_subarray_science.fits"
    input_model.write(raw_science_subarray_filename)

    # Rewrite the association to use the subarray science frame plus the
    # downloaded full-frame background.
    raw_background_filename = get_data_from_url("17903855")
    asn["products"][0]["members"][0]["expname"] = str(
        raw_science_subarray_filename)
    asn["products"][0]["members"][1]["expname"] = raw_background_filename
    with open("test_asn.json", "w") as out_asn:
        json.dump(asn, out_asn)

    iris_pipeline.pipeline.ProcessImagerL2Pipeline.call(
        "test_asn.json",
        config_file="iris_pipeline/tests/data/image2_iris.cfg")

    # Output must match the same slice of the full-frame reference.
    ref_filename = get_data_from_url("17905553")
    with iris_pipeline.datamodels.IRISImageModel("test_iris_subtract_bg_flat_cal.fits") as out, \
            iris_pipeline.datamodels.IRISImageModel(ref_filename) as ref:
        np.testing.assert_allclose(out.data, ref.data[subarray_slice], rtol=1e-6)
def test_level2_from_cmdline(tmpdir):
    """Create a Level2 association from the command line"""
    rule = 'DMSLevel2bBase'
    out_path = tmpdir.join('test_asn.json')
    inlist = ['a', 'b', 'c']

    Main(['-o', out_path.strpath, '-r', rule] + inlist)

    with open(out_path.strpath, 'r') as fh:
        asn = load_asn(fh, registry=AssociationRegistry(include_bases=True))

    assert asn['asn_rule'] == 'DMSLevel2bBase'
    assert asn['asn_type'] == 'None'

    # One single-member science product per input item, named after it.
    products = asn['products']
    assert len(products) == len(inlist)
    for product in products:
        assert product['name'] in inlist
        members = product['members']
        assert len(members) == 1
        assert members[0]['expname'] == product['name']
        assert members[0]['exptype'] == 'science'
def test_unserialize():
    """Test basic unserializing"""
    # Loading a serialized association from disk yields a dict.
    with open(t_path('data/asn_mosaic.json'), 'r') as fh:
        asn = load_asn(fh)
    assert isinstance(asn, dict)
def from_level2_schema():
    """Load the Level2 example association, returned as a one-item list."""
    with open(t_path('data/asn_level2.json')) as fh:
        return [load_asn(fh)]