Example #1
        fsk.write_flat_map(self.get_output('depth_map'), depth, descript=desc)

        ####
        # Implement final cuts
        # - Mag. limit
        # - S/N cut
        # - Star-galaxy separator
        # - Blending
        sel = ~(sel_maglim * sel_gals * sel_fluxcut * sel_blended)
        logger.info("Will lose %d objects to depth, S/N and stars." %
                    (np.sum(sel)))
        cat.remove_rows(sel)

        ####
        # Write final catalog
        # 1- header
        logger.info("Writing cut catalog.")
        hdr = fits.Header()
        hdr['BAND'] = self.config['band']
        hdr['DEPTH'] = self.config['depth_cut']
        prm_hdu = fits.PrimaryHDU(header=hdr)
        # 2- Catalog
        cat_hdu = fits.table_to_hdu(cat)
        # 3- Actual writing
        hdul = fits.HDUList([prm_hdu, cat_hdu])
        hdul.writeto(self.get_output('cut_catalog'), overwrite=True)


if __name__ == '__main__':
    cls = PipelineStage.main()
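
The catalog-writing step at the end of Example #1 (metadata header in the primary HDU, catalog as a binary table HDU) can be exercised on its own. Below is a minimal standalone sketch of that pattern using astropy; the table columns, header values and output path are illustrative only and not taken from the original stage.

# Standalone sketch of the header + table FITS write used above.
# Column names, header values and the output path are hypothetical.
import numpy as np
from astropy.io import fits
from astropy.table import Table

cat = Table({'ra': np.array([10.1, 10.2]),
             'dec': np.array([-1.3, -1.4]),
             'mag': np.array([24.1, 23.7])})

# 1- Header carrying the configuration that produced the catalog
hdr = fits.Header()
hdr['BAND'] = 'i'        # hypothetical band
hdr['DEPTH'] = 24.5      # hypothetical depth cut
prm_hdu = fits.PrimaryHDU(header=hdr)

# 2- Catalog converted to a binary table HDU
cat_hdu = fits.table_to_hdu(cat)

# 3- Write both HDUs to a single file
fits.HDUList([prm_hdu, cat_hdu]).writeto('cut_catalog.fits', overwrite=True)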
Example #2
        run_config = {
            "log_dir": dirname,
            "output_dir": dirname,
            "resume": False,
            "python_paths": [dirname, mod_dir],
        }

        launcher_config = {"interval": 0.5, "name": "mini"}
        site_config = {"name": "local", "python_paths": [dirname, mod_dir]}
        load(launcher_config, [site_config])

        # note that we don't add the subdir here
        with extra_paths(dirname):
            import my_stage

            print(os.environ["PYTHONPATH"])
            print(sys.path)
            print(os.listdir(dirname))
            pipeline = MiniPipeline([{"name": "MyStage"}], launcher_config)
            pipeline.initialize({}, run_config, config_path)
            status = pipeline.run()
            # Read back the stage's captured stdout
            with open(os.path.join(dirname, "MyStage.out")) as f:
                log = f.read()
            print(log)
            assert status == 0


# this has to be here because we test running the pipeline
if __name__ == "__main__":
    PipelineStage.main()
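
Example #2 imports a my_stage module from a temporary directory and runs its MyStage stage through a MiniPipeline. For context, a minimal sketch of what such a module might contain is shown below, assuming the standard ceci PipelineStage interface (class attributes name, inputs, outputs, config_options and a run method); the stage itself is hypothetical and only writes to its log.

# Minimal sketch of a my_stage module, assuming the standard ceci
# PipelineStage interface.  The stage is hypothetical and does no real work.
from ceci import PipelineStage


class MyStage(PipelineStage):
    name = "MyStage"      # must match the stage name used in the pipeline spec
    inputs = []           # no pipeline inputs
    outputs = []          # no pipeline outputs
    config_options = {}   # no extra configuration

    def run(self):
        # Anything printed here ends up in MyStage.out when the pipeline runs
        print("MyStage ran successfully")


if __name__ == "__main__":
    PipelineStage.main()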