Example #1
    def makeCompatibleRepo(self, repoDir, calibRepoDir):
        """Set up a directory as a repository compatible with this dataset.

        If the directory already exists, any files required by the dataset
        are added if absent; files that are already present are left
        unchanged.

        Parameters
        ----------
        repoDir : `str`
            The directory where the output repository will be created.
        calibRepoDir : `str`
            The directory where the output calibration repository will be created.
        """
        mapperArgs = {'mapperArgs': {'calibRoot': calibRepoDir}}
        if _isRepo(self.templateLocation):
            # The stub repo is not used as a parent because v1 and v2
            # repositories cannot be mixed in a parents list.
            inputRepo = self.templateLocation
        else:
            inputRepo = self._stubInputRepo
        # Constructing the Butler creates the output repository as a side effect.
        Butler(inputs=[{"root": inputRepo, "mode": "r"}],
               outputs=[{"root": repoDir, "mode": "rw", **mapperArgs}])
Example #2
def inject_fakes_to_calexp(calexp_repo,
                           calexp_id,
                           magVar,
                           coords,
                           poisson=False):
    """This function injects fakes onto a calexp exposure at given positions.
       caelxp_repo:    An empty calexp repo which is used to save the injected exposure.
       calexp_id:      The data id of the calexp exposure
       magVar:         The injected magnitude
       coords:         A list of [x, y] pixel coordinates
       poisson:        If true, add Poisson fluctuations to the fake model
    """

    calexp_butler = Butler(calexp_repo)
    calexp_photoCalib = calexp_butler.get('calexp_photoCalib', calexp_id)
    calexp_exposure = calexp_butler.get('calexp', dataId=calexp_id)
    for x, y in coords:
        inject_star(calexp_exposure,
                    calexp_photoCalib,
                    x,
                    y,
                    magVar,
                    poisson=poisson)
    calexp_butler.put(calexp_exposure, 'calexp', dataId=calexp_id)
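
A minimal usage sketch for the function above; the repository path, data id, and magnitude are hypothetical:

# Hypothetical inputs, for illustration only.
repo = '/path/to/calexp_repo'
data_id = {'visit': 123456, 'ccdnum': 10}
inject_fakes_to_calexp(repo, data_id, magVar=20.0,
                       coords=[[1000, 1000], [1500, 2000]])

Example #3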
    def setUp(self):
        refCatDir = os.path.join(os.path.dirname(__file__), "data",
                                 "sdssrefcat")

        self.bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0),
                                  afwGeom.Extent2I(3001, 3001))
        self.ctrPix = afwGeom.Point2I(1500, 1500)
        metadata = dafBase.PropertySet()
        metadata.set("RADECSYS", "FK5")
        metadata.set("EQUINOX", 2000.0)
        metadata.set("CTYPE1", "RA---TAN")
        metadata.set("CTYPE2", "DEC--TAN")
        metadata.set("CUNIT1", "deg")
        metadata.set("CUNIT2", "deg")
        metadata.set("CRVAL1", 215.5)
        metadata.set("CRVAL2", 53.0)
        metadata.set("CRPIX1", self.ctrPix[0] + 1)
        metadata.set("CRPIX2", self.ctrPix[1] + 1)
        metadata.set("CD1_1", 5.1e-05)
        metadata.set("CD1_2", 0.0)
        metadata.set("CD2_2", -5.1e-05)
        metadata.set("CD2_1", 0.0)
        self.tanWcs = afwImage.makeWcs(metadata)
        self.exposure = afwImage.ExposureF(self.bbox)
        self.exposure.setWcs(self.tanWcs)
        self.exposure.setFilter(afwImage.Filter("r", True))
        butler = Butler(refCatDir)
        self.refObjLoader = LoadIndexedReferenceObjectsTask(butler=butler)
Example #4
def load_tract(repo, tract, patches=None, **kwargs):
    """Merge catalogs from forced-photometry coadds across available filters.

    Parameters
    ----------
    repo: str
        File location of Butler repository+rerun to load.
    tract: int
        Tract of sky region to load.
    patches: list of str
        List of patches.  If not specified, defaults to '0,0'--'7,7'.

    Returns
    -------
    AstroPy Table of merged catalog.
    """
    butler = Butler(repo)
    if patches is None:
        patches = ['%d,%d' % (i, j) for i in range(8) for j in range(8)]

    merged_patch_cats = []
    for patch in patches:
        try:
            this_patch_merged_cat = load_patch(butler, tract, patch, **kwargs)
        except NoResults as e:
            print(e)
            continue
        merged_patch_cats.append(this_patch_merged_cat)

    merged_tract_cat = vstack(merged_patch_cats)
    return merged_tract_cat
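
A hedged usage sketch; the rerun path and tract number are hypothetical. The same call pattern applies to the load_tract variants shown below:

# Hypothetical repository path and tract, for illustration only.
tract_cat = load_tract('/path/to/repo/rerun/my_rerun', 4849)
print(len(tract_cat), tract_cat.colnames[:5])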
Example #5
def load_tract(repo, tract, patches=None, **kwargs):
    """Merge catalogs from forced-photometry coadds across available filters.

    Parameters
    ----------
    repo: str
        File location of Butler repository+rerun to load.
    tract: int
        Tract of sky region to load.
    patches: list of str
        List of patches.  If not specified, the patches for this tract
        are extracted from the skymap.

    Returns
    -------
    AstroPy Table of merged catalog.
    """
    butler = Butler(repo)

    if patches is None:
        # Extract the patches for this tract from the skymap
        skymap = butler.get(datasetType='deepCoadd_skyMap')
        patches = ['%d,%d' % patch.getIndex() for patch in skymap[tract]]

    merged_patch_cats = []
    for patch in patches:
        this_patch_merged_cat = load_patch(butler, tract, patch, **kwargs)
        # Even if this_patch_merged_cat is an empty Table, it's still fine to
        # append it to the list here; empty tables disappear in the vstack below.
        merged_patch_cats.append(this_patch_merged_cat)

    merged_tract_cat = vstack(merged_patch_cats)
    return merged_tract_cat
Example #6
def get_butler_by_repo(repo, **kwargs):
    """Construct and return a Bulter for the requested repository

    Parameters
    ----------
    repo : `str`
        Name of the repo, e.g., 'TS8' | 'BOT'
    kwargs
        Passed to the Butler constructor

    Returns
    -------
    butler : `Butler`
        the requested Butler

    Raises
    ------
    KeyError : If repo does not match any known repository
    """
    try:
        repo_path = BUTLER_REPO_DICT[repo]
    except KeyError:
        raise KeyError("Unknown Bulter repo key %s" % repo)
    butler = Butler(repo_path, **kwargs)
    return butler
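
A brief usage sketch; 'BOT' is one of the repo keys named in the docstring, and unknown keys raise KeyError:

try:
    butler = get_butler_by_repo('BOT')
except KeyError as err:
    print(err)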
Example #7
def load_tract(repo, tract, patches=None, **kwargs):
    """Merge catalogs from forced-photometry coadds across available filters.

    Parameters
    ----------
    repo: str
        File location of Butler repository+rerun to load.
    tract: int
        Tract of sky region to load.
    patches: list of str
        List of patches.  If not specified, the patches for this tract
        are extracted from the skymap.

    Returns
    -------
    Pandas DataFrame of merged catalog.
    """
    butler = Butler(repo)

    if patches is None:
        # Extract the patches for this tract from the skymap
        skymap = butler.get(datasetType='deepCoadd_skyMap')
        patches = ['%d,%d' % patch.getIndex() for patch in skymap[tract]]

    merged_patch_cats = []
    for patch in patches:
        this_patch_merged_cat = load_patch(butler, tract, patch, **kwargs)
        merged_patch_cats.append(this_patch_merged_cat)

    # DataFrame.append returns a new frame rather than modifying in place
    # (and is deprecated), so collect the per-patch catalogs in a list and
    # concatenate once at the end.
    merged_tract_cat = pd.concat(merged_patch_cats, ignore_index=True)
    return merged_tract_cat
Example #8
    def setUp(self):

        # Load sample input from disk
        testDir = os.path.dirname(__file__)
        self.srcCat = afwTable.SourceCatalog.readFits(
            os.path.join(testDir, "data", "v695833-e0-c000.xy.fits"))

        self.srcCat["slot_ApFlux_fluxSigma"] = 1
        self.srcCat["slot_PsfFlux_fluxSigma"] = 1

        # The .xy.fits file has sources in the range ~ [0,2000],[0,4500]
        # which is bigger than the exposure
        self.bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0),
                                  afwGeom.Extent2I(2048, 4612))
        smallExposure = afwImage.ExposureF(
            os.path.join(testDir, "data", "v695833-e0-c000-a00.sci.fits"))
        self.exposure = afwImage.ExposureF(self.bbox)
        self.exposure.setWcs(smallExposure.getWcs())
        self.exposure.setFilter(smallExposure.getFilter())
        self.exposure.setCalib(smallExposure.getCalib())

        # Make a reference loader
        butler = Butler(RefCatDir)
        self.refObjLoader = LoadIndexedReferenceObjectsTask(butler=butler)
        logLevel = Log.TRACE
        self.log = Log.getLogger('testPhotoCal')
        self.log.setLevel(logLevel)

        self.config = PhotoCalConfig()

        # The test and associated data have been prepared on the basis that we
        # use the PsfFlux to perform photometry.
        self.config.fluxField = "base_PsfFlux_flux"
Example #9
    def setUp(self):
        self.butler = Butler(
            os.path.join(weeklyRerun, "pipeline", self.configuration,
                         "pipeline"))
        self.visits = dict(brn=getBrnVisits,
                           bmn=getBmnVisits)[self.configuration]()
        self.design = PfsDesign.read(1, weeklyRaw)
Example #10
    def __init__(
        self, butlerpath, destination=None, dataset=None, engine="pyarrow", sample_frac=None, num_buckets=8,
    ):

        self._butler = Butler(butlerpath)
        if dataset is None:
            dataset = self._default_dataset

        self.dataset = dataset
        if destination is None:
            destination = f"{butlerpath}/ktk"
        self.destination = destination
        self.sample_frac = sample_frac
        self.num_buckets = num_buckets

        self.stats_path = f"{self.destination}/{self.dataset}_stats.parq"

        self._store = None
        self.engine = engine
        self.metadata = self._butler.get("qaDashboard_info")

        self.dataIds = [
            dataId for dataId in self.iter_dataId()
            if self._butler.datasetExists(self.dataset, dataId)
        ]

        self.filters = [filt for filt in self.metadata["visits"].keys()]
        self.dataIds_by_filter = {
            filt: [d for d in self.dataIds if d["filter"] == filt] for filt in self.filters
        }

        self._filenames = None
        self._filenames_by_filter = None
Example #11
    def setUp(self):
        # Load sample input from disk
        testDir = os.path.dirname(__file__)

        self.srcSet = SourceCatalog.readFits(os.path.join(testDir, "v695833-e0-c000.xy.fits"))

        self.bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0), afwGeom.Extent2I(2048, 4612))  # approximate
        # create an exposure with the right metadata; the closest thing we have is
        # apparently v695833-e0-c000-a00.sci.fits, which is much too small
        smallExposure = ExposureF(os.path.join(testDir, "v695833-e0-c000-a00.sci.fits"))
        self.exposure = ExposureF(self.bbox)
        self.exposure.setWcs(smallExposure.getWcs())
        self.exposure.setFilter(smallExposure.getFilter())
        # copy the pixels we can, in case the user wants a debug display
        mi = self.exposure.getMaskedImage()
        mi.assign(smallExposure.getMaskedImage(), smallExposure.getBBox())

        logLevel = Log.INFO
        refCatDir = os.path.join(testDir, "data", "sdssrefcat")
        butler = Butler(refCatDir)
        refObjLoader = LoadIndexedReferenceObjectsTask(butler=butler)
        astrometryConfig = AstrometryTask.ConfigClass()
        self.astrom = AstrometryTask(config=astrometryConfig, refObjLoader=refObjLoader)
        self.astrom.log.setLevel(logLevel)
        # Since our sourceSelector is a registry object we have to wait for it to be created
        # before setting default values.
        self.astrom.matcher.sourceSelector.config.minSnr = 0
Example #12
def loadAllTables(filters, newpath, oldpath, dataPath, patch, tract):
    logger.info("Loading new catalog")
    newCats = loadCatalogs(filters, newpath, "template")
    logger.info("Loading new flux conserved catalog")
    newCats2 = loadCatalogs(filters, newpath)
    logger.info("Loading old catalog")
    oldCats = loadCatalogs(filters, oldpath)
    logger.info("Building astropy tables")
    newFlux, newSed = getAllFlux(newCats, filters)
    newFlux2, newSed2 = getAllFlux(newCats2, filters)
    newTable = buildAllTables(newCats, newFlux, newSed, filters)
    newTable2 = buildAllTables(newCats2, newFlux2, newSed2, filters)
    newTable = newTable[(newTable["parent"] != 0) & ~np.isnan(newTable["x"])]
    newTable2 = newTable2[(newTable2["parent"] != 0)
                          & ~np.isnan(newTable2["x"])]
    oldTables = OrderedDict([(f, buildTable(oldCats[f])) for f in filters])
    logger.info("matching results")
    oldTable, matches = matchAllCatalogs(newTable, oldTables, filters)
    matchedNew = newTable[matches]
    matchedNew2 = newTable2[matches]

    logger.info("loading calexps")
    butler = Butler(inputs=dataPath)
    calexp = OrderedDict()
    for f in filters:
        calexp[f] = butler.get('deepCoadd_calexp',
                               patch=patch,
                               filter="HSC-" + f,
                               tract=tract)
    return oldTable, matchedNew, matchedNew2, calexp, newCats
Example #13
    def setUp(self):
        inputDir = os.path.join(ROOT, "data", "input")
        self.testDir = tempfile.mkdtemp(dir=os.path.join(ROOT, 'tests'),
                                        prefix=type(self).__name__ + '-')
        self.butler = Butler(inputs=inputDir,
                             outputs={
                                 "root": self.testDir,
                                 "mode": 'rw'
                             })
Example #14
def get_diff_calexp_coadd_butler(diff_repo, calexp_repo, coadd_repo):
    """
    Returns:
    butler_dict = {
        'diff': diff_butler,
        'calexp_butler': calexp_butler,
        'coadd_butler': coadd_butler
    }
    """
    diff_butler = Butler(diff_repo)
    calexp_butler = Butler(calexp_repo)
    coadd_butler = Butler(coadd_repo)
    butler_dict = {
        'diff': diff_butler,
        'calexp': calexp_butler,
        'coadd': coadd_butler
    }
    return butler_dict
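
A hedged usage sketch; the repository paths, dataset type, and data id below are hypothetical:

# Hypothetical inputs, for illustration only.
butlers = get_diff_calexp_coadd_butler('/repos/diff', '/repos/calexp',
                                       '/repos/coadd')
data_id = {'visit': 123456, 'ccdnum': 10}
diff_exp = butlers['diff'].get('deepDiff_differenceExp', dataId=data_id)

Example #15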
    def setUp(self):
        np.random.seed(12345)
        self.butler = Butler(RefCatDir)
        refObjLoader = LoadIndexedReferenceObjectsTask(butler=self.butler)
        center = lsst.afw.geom.SpherePoint(215.5, 53.0, lsst.afw.geom.degrees)
        radius = 0.5 * lsst.afw.geom.degrees
        self.filter = "r"
        self.references = refObjLoader.loadSkyCircle(center, radius,
                                                     self.filter).refCat
Example #16
def makeLinearizerDecam(fromFile, force=False, verbose=False):
    """Convert the specified DECam linearity FITS table to standard LSST format

    Details:
    - Input format is one table per CCD, HDU is amplifier number,
        the table has 3 columns: ADU, ADU_LINEAR_A, ADU_LINEAR_B.
        The values of ADU should be contiguous (0, 1, 2...); check and error out if not.
        The values of the latter two are replacements (0+delta0, 1+delta1, 2+delta2...)
        and this is converted to offsets for the LSST linearization tables (delta0, delta1, delta2...)
    - Output is a set of LinearizeLookupTable instances, one per CCD, saved as dataset type "linearizer"
    - The row indices for the linearization lookup table are (row index=amp name): 0=A, 1=B

    @param[in] fromFile  path to DECam linearity table (a FITS file with one HDU per amplifier)
    """
    print("Making DECam linearizers from %r" % (fromFile, ))
    butler = Butler(mapper=DecamMapper)
    linearizerDir = DecamMapper.getLinearizerDir()
    if os.path.exists(linearizerDir):
        if not force:
            print("Output directory %r exists; use --force to replace" %
                  (linearizerDir, ))
            sys.exit(1)
        print("Replacing data in linearizer directory %r" % (linearizerDir, ))
    else:
        print("Creating linearizer directory %r" % (linearizerDir, ))
        os.makedirs(linearizerDir)

    camera = DecamMapper().camera
    fromHDUs = fits.open(fromFile)[1:]  # HDU 0 has no data
    assert len(fromHDUs) == len(camera)
    for ccdind, (detector, hdu) in enumerate(zip(camera, fromHDUs)):
        ccdnum = ccdind + 1
        if verbose:
            print("ccdnum=%s; detector=%s" % (ccdnum, detector.getName()))
        fromData = hdu.data
        assert len(fromData.dtype) == 3
        lsstTable = np.zeros((2, len(fromData)), dtype=np.float32)
        uncorr = fromData["ADU"]
        if not np.allclose(uncorr, np.arange(len(fromData))):
            raise RuntimeError(
                "ADU data not a range of integers starting at 0")
        for i, ampName in enumerate("AB"):
            # convert DECam replacement table to LSST offset table
            if verbose:
                print("DECam table for %s=%s..." % (
                    ampName,
                    fromData["ADU_LINEAR_" + ampName][0:5],
                ))
            lsstTable[i, :] = fromData["ADU_LINEAR_" + ampName] - uncorr
            if verbose:
                print("LSST  table for %s=%s..." % (
                    ampName,
                    lsstTable[i, 0:5],
                ))
        linearizer = LinearizeLookupTable(table=lsstTable, detector=detector)
        butler.put(linearizer, "linearizer", dataId=dict(ccdnum=ccdnum))
    print("Wrote %s linearizers" % (ccdind + 1, ))
Example #17
    def ingest(cls, root, camera, visit, filenames, sensors, metadata):
        """Add all images from an external visit (a full-focal-plane
        exposure) to a data repository.

        This both symlinks the external data files to the appropriate
        location in the directory structure and adds the necessary
        rows to the SQLite registry tables.

        Parameters
        ----------
        root : str
            Directory of the data repository to add data to.  Must have
            an existing "registry.sqlite3" file present directly in the
            root and a _mapper file pointing to HscAndExtMapper.
        camera : str
            Name of the camera used to produce the external observation.
            Must have an entry in ExternalImage.CAMERA_INFO.
        visit : int
            Original integer visit ID for the observation, *before* adding
            CAMERA_INFO[camera]["ID"]*CAMERA_ID_MULTIPLIER.
        filenames : list
            A list of file names containing the external data files, either
            relative to the current directory or absolute.
        sensors : list
            A list of integer sensor IDs corresponding to the filenames list.
        metadata : VisitMetadata
            An object containing additional metadata for this visit to be
            added to the registry.  See VisitMetadata for a description of
            what attributes are required.
        """
        db = sqlite3.connect(os.path.join(root, "registry.sqlite3"))
        butler = Butler(inputs=[root])
        visit += cls.CAMERA_INFO[camera]["id"] * cls.CAMERA_ID_MULTIPLIER
        ccdCols = [
            "filter", "dateObs", "taiObs", "field", "expId", "pointing",
            "dataType", "pa"
        ]
        ccdSql = "INSERT INTO raw (visit, ccd, {}) VALUES (?, ?, {})".format(
            ", ".join(ccdCols), ", ".join(["?"] * len(ccdCols)))
        ccdValues = tuple(getattr(metadata, col) for col in ccdCols)
        visitCols = ["filter", "dateObs", "taiObs", "field"]
        visitSql = "INSERT INTO raw_visit (visit, {}) VALUES (?, {})".format(
            ", ".join(visitCols), ", ".join(["?"] * len(visitCols)))
        visitValues = tuple(getattr(metadata, col) for col in visitCols)
        for filename, sensor in zip(filenames, sensors):
            outputFileName = butler.get("external_filename",
                                        visit=visit,
                                        ccd=sensor)[0]
            os.symlink(filename, outputFileName)
            db.execute(ccdSql, (
                visit,
                sensor,
            ) + ccdValues)
        db.execute(visitSql, (visit, ) + visitValues)
        db.commit()
        return visit
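
A hedged usage sketch; the VisitMetadata constructor, camera name, paths, and values below are assumptions for illustration, but the attributes match the ccdCols and visitCols used above:

# Hypothetical metadata object and inputs, for illustration only.
meta = VisitMetadata(filter='r', dateObs='2020-01-01', taiObs='2020-01-01',
                     field='TEST', expId=1, pointing=0, dataType='OBJECT',
                     pa=0.0)
new_visit = ExternalImage.ingest('/path/to/repo', camera='decam', visit=100,
                                 filenames=['image_01.fits'], sensors=[1],
                                 metadata=meta)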
Example #18
    def butler(self):
        if not self._butler:
            if self.gen3:
                GEN3_REPO_ROOT = os.path.join(getPackageDir("ci_hsc_gen2"),
                                              "DATAgen3")
                self._butler = lsst.daf.butler.Butler(
                    GEN3_REPO_ROOT, collections=self.collection)
            else:
                self._butler = Butler(self.root)
        return self._butler
Example #19
    def testDetectors(self):
        """Test that the detector returned by the gen 2 butler is the same
        as the expected one.
        """
        for root, did, expected in zip(self.roots, self.ids, self.expecteds):
            butler = Butler(root)
            raw = butler.get("raw", dataId=did)
            for amp1, amp2 in zip(expected['detector'], raw.getDetector()):
                with self.subTest(amp=amp1.getName()):
                    self.assertEqual(amp1.getName(), amp2.getName())
                    self.assertAmpRawBBoxesEqual(amp1, amp2)
Example #20
    def create(self):
        """Create a Butler.

        @returns a new Butler.
        """

        if hasattr(self.mapper, 'root'):
            root = self.mapper.root
        else:
            root = None
        return Butler(root=root, mapper=self.mapper)
Example #21
def installExternalData():
    """Command-line interface for installing external data"""
    parser = argparse.ArgumentParser(description="Install external data")
    parser.add_argument("source", help="Source of external data")
    parser.add_argument("root", help="Butler data root")
    parser.add_argument("--tract", type=int, default=0, help="Tract identifier")
    parser.add_argument("--visitCcd", nargs=2, type=int, default=[], action="append",
                        help="Visit and CCD of jointcal data to ingest (multiple OK)")
    args = parser.parse_args()

    butler = Butler(args.root)
    installJointcal(args.source, butler, args.tract, args.visitCcd)
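
A minimal usage sketch; the paths are hypothetical, and sys.argv is set by hand to mimic a command-line invocation:

import sys
# Hypothetical arguments, for illustration only.
sys.argv = ['installExternalData', '/path/to/source', '/path/to/data_root',
            '--tract', '0']
installExternalData()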
Example #22
    def testAssemble(self):
        """Test the assembly of E2V and ITL sensors
        """
        task = AssembleCcdTask()
        # exclude LATISS for this test since we don't have an expected output
        for root, did, expected in zip(self.roots, self.ids, self.expecteds):
            butler = Butler(root)
            raw = butler.get("raw", dataId=did)
            assembled = task.assembleCcd(raw)
            count = numpy.sum(expected['expected'].read().array -
                              assembled.getImage().array)
            self.assertEqual(count, 0)
Example #23
def load_and_save_tract(repo,
                        tract,
                        filename,
                        key_prefix='coadd',
                        patches=None,
                        overwrite=True,
                        verbose=False,
                        **kwargs):
    """Save catalogs to HDF5 from forced-photometry coadds across available filters.

    Iterates through patches, saving each in append mode to the same HDF5 file.

    Parameters
    ----------
    repo: str
        File location of Butler repository+rerun to load.
    tract: int
        Tract of sky region to load.
    filename: str
        Filename for HDF file.
    key_prefix: str
        Base for the key in the HDF file.
        Keys will be of the form "%s_%d_%s" % (key_prefix, tract, patch),
        with the comma removed from the patch name to provide a valid
        Python identifier: e.g., 'coadd_4849_11'
    patches: list of str
        List of patches.  If not specified, the patches for this tract
        are extracted from the skymap.
    overwrite: bool
        Overwrite an existing HDF file.
    verbose: bool
        If True, print progress messages.
    """
    butler = Butler(repo)

    if patches is None:
        # Extract the patches for this tract from the skymap
        skymap = butler.get(datasetType='deepCoadd_skyMap')
        patches = ['%d,%d' % patch.getIndex() for patch in skymap[tract]]

    for patch in patches:
        if verbose:
            print("Processing tract %d, patch %s" % (tract, patch))
        patch_merged_cat = load_patch(butler,
                                      tract,
                                      patch,
                                      verbose=verbose,
                                      **kwargs)
        if len(patch_merged_cat) == 0:
            if verbose:
                print("  No good entries for tract %d, patch %s" %
                      (tract, patch))
            continue

        key = '%s_%d_%s' % (key_prefix, tract, patch)
        key = valid_identifier_name(key)
        patch_merged_cat.to_pandas().to_hdf(filename, key, format='fixed')
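
A minimal usage sketch; the rerun path, tract, and output filename are hypothetical:

load_and_save_tract('/path/to/repo/rerun/my_rerun', 4849,
                    'merged_tract_4849.hdf5', verbose=True)

Example #24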
    def setUp(self):
        refCatDir = os.path.join(os.path.dirname(__file__), "data", "sdssrefcat")

        self.bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0), afwGeom.Extent2I(3001, 3001))
        crpix = afwGeom.Box2D(self.bbox).getCenter()
        self.tanWcs = afwGeom.makeSkyWcs(crpix=crpix,
                                         crval=afwGeom.SpherePoint(215.5, 53.0, afwGeom.degrees),
                                         cdMatrix=afwGeom.makeCdMatrix(scale=5.1e-5*afwGeom.degrees))
        self.exposure = afwImage.ExposureF(self.bbox)
        self.exposure.setWcs(self.tanWcs)
        self.exposure.setFilter(afwImage.Filter("r", True))
        butler = Butler(refCatDir)
        self.refObjLoader = LoadIndexedReferenceObjectsTask(butler=butler)
Example #25
    def connect(self):
        # Search for the metadata.yaml file:
        # 1. Look in the path directory, e.g. '/project/tmorton/tickets/DM-20015/RC2_w18/metadata.yaml'
        # 2. Look for the data folder in the current directory, e.g. './RC2_w18/metadata.yaml'
        # 3. Look for the data folder in the directory given by the LSST_META env variable,
        #    e.g. '/user/name/lsst_meta/RC2_w18/metadata.yaml' when LSST_META='/user/name/lsst_meta'
        print('-- read metadata file --')

        # If Butler was imported successfully, use it to connect; otherwise read from disk.
        if Butler:
            try:
                print('-- connect to butler --')
                self.conn = Butler(str(self.path))
                self.metadata = self.conn.get('qaDashboard_metadata')
                self.failures = self.metadata.get('failures', {})
                if not self.filters:
                    self.filters = list(self.metadata['visits'].keys())
                if not self.tracts:
                    all_tracts = [list(self.metadata['visits'][filt].keys()) for filt in self.filters]
                    self.tracts = list(set([int(y) for x in all_tracts for y in x]))
            except Exception:
                print(f'{self.path} is not available in Butler; attempting to read parquet files instead')
        else:
            if self.path.joinpath(METADATA_FILENAME).exists():
                self.metadata_path = self.path.joinpath(METADATA_FILENAME)
            else:
                self.metadata_path = Path(os.environ.get('LSST_META', os.curdir)).joinpath(self.path.name, METADATA_FILENAME)

            with self.metadata_path.open('r') as f:
                self.metadata = yaml.load(f, Loader=yaml.SafeLoader)
                self.failures = self.metadata.get('failures', {})
                if self.tracts is None:
                    self.tracts = list(set(x for v in self.metadata['visits'].values() for x in v.keys())) 


        print('-- read coadd table --')
        self.fetch_coadd_table()  # currently ignoring forced/unforced
        # update metadata based on coadd table fields
        print('-- generate other metadata fields --')
        df = self.coadd['qaDashboardCoaddTable']
        self.flags = df.columns[df.dtypes == bool].to_list()
        if not Butler:
            self.filters = list(self.metadata['visits'].keys()) 
        self.metrics = set(df.columns.to_list()) - set(self.flags) - set(['patch', 'dec', 'label', 'psfMag', 
                                                                         'ra', 'filter', 'dataset', 'dir0', 'tract'])
        print('-- read visit data --')
        self.fetch_visits_by_metric()
        print('-- done with reads --')
Example #26
    def __init__(self, repo, vb=False, path_to_tracts=''):
        self.repo = repo
        # Instantiate a butler, or report failure:
        from lsst.daf.persistence import Butler
        try:
            self.butler = Butler(repo)
        except Exception:
            self.butler = None
            print("Warning: failed to instantiate a butler to get data from repo '" + repo + "'")
            return
        # Set up some internal variables:
        self.vb = vb
        self.exists = {}
        self.existence = False
        self.counts = {}
        self.tracts = []
        self.path_to_tracts = path_to_tracts
        if path_to_tracts != '':
            try:
                self.skymap_butler = Butler(repo + path_to_tracts)
            except Exception:
                self.skymap_butler = None
                print("Warning: failed to find a skyMap for the path " + repo + path_to_tracts)
        return
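
Example #27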
    def testBasics(self):
        """Test detection and measurement on simple synthesized data
        """
        bbox = Box2I(Point2I(256, 100), Extent2I(128, 127))
        minCounts = 5000
        maxCounts = 50000
        starSigma = 1.5
        numX = 5
        numY = 5
        coordList = self.makeCoordList(
            bbox=bbox,
            numX=numX,
            numY=numY,
            minCounts=minCounts,
            maxCounts=maxCounts,
            sigma=starSigma,
        )
        kwid = 11  # kernel width
        sky = 2000
        # create an exposure without a Wcs; add the Wcs later
        exposure = plantSources(bbox=bbox,
                                kwid=kwid,
                                sky=sky,
                                coordList=coordList,
                                addPoissonNoise=True)

        schema = SourceTable.makeMinimalSchema()

        config = DetectAndMeasureTask.ConfigClass()
        task = DetectAndMeasureTask(config=config, schema=schema)

        butler = Butler(root=InputDir)
        dataRef = butler.dataRef("calexp", dataId=dict(visit=1))
        wcs = dataRef.get("raw").getWcs()
        exposure.setWcs(wcs)
        exposureIdInfo = dataRef.get("expIdInfo")
        taskRes = task.run(exposure=exposure, exposureIdInfo=exposureIdInfo)
        self.assertEqual(len(taskRes.sourceCat), numX * numY)
        schema = taskRes.sourceCat.schema
        centroidFlagKey = schema.find("slot_Centroid_flag").getKey()
        parentKey = schema.find("parent").getKey()
        psfFluxFlagKey = schema.find("slot_PsfFlux_flag").getKey()
        psfFluxKey = schema.find("slot_PsfFlux_flux").getKey()
        for src in taskRes.sourceCat:
            self.assertFalse(src.get(centroidFlagKey))  # centroid found
            self.assertEqual(src.get(parentKey), 0)  # not deblended
            self.assertFalse(src.get(psfFluxFlagKey))  # flux measured
            self.assertGreater(src.get(psfFluxKey), 4000)  # flux sane
Example #28
def __main__(args):
    repo_directory = "/lsstdata/offline/teststand/BOT/gen2repo"
    butler = Butler(repo_directory)
    runs = ['12672', '12673', '12844', '12845', '12853', '12855']
    run = runs[0]
    sensors = set(
        butler.queryMetadata('raw', ['raftName', 'detectorName'],
                             dataId={
                                 'run': run,
                                 'imageType': 'BIAS'
                             }))

    run_dark_regression_analysis_on_all_sensors(butler,
                                                runs,
                                                sensors,
                                                show_plots=False)
Example #29
    def getTargetList(parsedCmd, **kwargs):
        parentDir = parsedCmd.input
        while os.path.exists(os.path.join(parentDir, "_parent")):
            parentDir = os.path.realpath(os.path.join(parentDir, "_parent"))
        butler2 = Butler(root=os.path.join(parentDir, "rerun",
                                           parsedCmd.prior_rerun),
                         calibRoot=parsedCmd.calib)
        idParser = parsedCmd.prior.__class__(parsedCmd.prior.level)
        idParser.idList = parsedCmd.prior.idList
        idParser.datasetType = parsedCmd.prior.datasetType
        butler = parsedCmd.butler
        parsedCmd.butler = butler2
        idParser.makeDataRefList(parsedCmd)
        parsedCmd.butler = butler
        return [(parsedCmd.id.refList,
                 dict(priorRefList=idParser.refList, **kwargs))]
Example #30
    def setUp(self):

        # Load sample input from disk
        testDir = os.path.dirname(__file__)
        self.srcCat = afwTable.SourceCatalog.readFits(
            os.path.join(testDir, "data", "v695833-e0-c000.xy.fits"))

        self.srcCat["slot_ApFlux_fluxSigma"] = 1
        self.srcCat["slot_PsfFlux_fluxSigma"] = 1

        # The .xy.fits file has sources in the range ~ [0,2000],[0,4500]
        # which is bigger than the exposure
        self.bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0),
                                  afwGeom.Extent2I(2048, 4612))
        smallExposure = afwImage.ExposureF(
            os.path.join(testDir, "data", "v695833-e0-c000-a00.sci.fits"))
        self.exposure = afwImage.ExposureF(self.bbox)
        self.exposure.setWcs(smallExposure.getWcs())
        self.exposure.setFilter(smallExposure.getFilter())
        self.exposure.setCalib(smallExposure.getCalib())

        coordKey = self.srcCat.getCoordKey()
        centroidKey = self.srcCat.getCentroidKey()
        wcs = self.exposure.getWcs()
        for src in self.srcCat:
            src.set(coordKey, wcs.pixelToSky(src.get(centroidKey)))

        # Make a reference loader
        butler = Butler(RefCatDir)
        self.refObjLoader = LoadIndexedReferenceObjectsTask(butler=butler)
        logLevel = Log.TRACE
        self.log = Log.getLogger('testPhotoCal')
        self.log.setLevel(logLevel)

        self.config = PhotoCalConfig()
        self.config.match.matchRadius = 0.5
        self.config.match.referenceSelection.doMagLimit = True
        self.config.match.referenceSelection.magLimit.maximum = 22.0
        self.config.match.referenceSelection.magLimit.fluxField = "i_flux"
        self.config.match.referenceSelection.doFlags = True
        self.config.match.referenceSelection.flags.good = ['photometric']
        self.config.match.referenceSelection.flags.bad = ['resolved']
        self.config.match.sourceSelection.doUnresolved = False  # Don't have star/galaxy in the srcCat

        # The test and associated data have been prepared on the basis that we
        # use the PsfFlux to perform photometry.
        self.config.fluxField = "base_PsfFlux_flux"