Example #1
    def testLinearCombinationKernel(self):
        """Test LinearCombinationKernel using a set of delta basis functions
        """
        kWidth = 3
        kHeight = 2
        
        pol = pexPolicy.Policy()
        additionalData = dafBase.PropertySet()
        loc = dafPersist.LogicalLocation("tests/data/kernel5.boost")
        persistence = dafPersist.Persistence.getPersistence(pol)

        # create list of kernels
        basisImArrList = []
        kVec = afwMath.KernelList()
        for row in range(kHeight):
            for col in range(kWidth):
                kernel = afwMath.DeltaFunctionKernel(kWidth, kHeight, afwGeom.Point2I(col, row))
                basisImage = afwImage.ImageD(kernel.getDimensions())
                kernel.computeImage(basisImage, True)
                basisImArrList.append(basisImage.getArray().transpose().copy())
                kVec.append(kernel)
        
        kParams = [0.0]*len(kVec)
        k = afwMath.LinearCombinationKernel(kVec, kParams)
        for ii in range(len(kVec)):
            kParams = [0.0]*len(kVec)
            kParams[ii] = 1.0
            k.setKernelParameters(kParams)

            storageList = dafPersist.StorageList()
            storage = persistence.getPersistStorage("XmlStorage", loc)
            storageList.append(storage)
            persistence.persist(k, storageList, additionalData)

            storageList2 = dafPersist.StorageList()
            storage2 = persistence.getRetrieveStorage("XmlStorage", loc)
            storageList2.append(storage2)
            x = persistence.unsafeRetrieve("LinearCombinationKernel",
                    storageList2, additionalData)
            k2 = afwMath.LinearCombinationKernel.swigConvert(x)

            self.kernelCheck(k, k2)

            kIm = afwImage.ImageD(k2.getDimensions())
            k2.computeImage(kIm, True)
            kImArr = kIm.getArray().transpose()
            if not numpy.allclose(kImArr, basisImArrList[ii]):
                self.fail("%s = %s != %s for the %s'th basis kernel" % \
                    (k2.__class__.__name__, kImArr, basisImArrList[ii], ii))
Example #2
    def testFixedKernel(self):
        """Test FixedKernel using a ramp function
        """
        kWidth = 5
        kHeight = 6

        inArr = numpy.arange(kWidth * kHeight, dtype=float)
        inArr.shape = [kWidth, kHeight]

        inImage = afwImage.ImageD(afwGeom.Extent2I(kWidth, kHeight))
        for row in range(inImage.getHeight()):
            for col in range(inImage.getWidth()):
                inImage.set(col, row, inArr[col, row])

        k = afwMath.FixedKernel(inImage)

        pol = pexPolicy.Policy()
        additionalData = dafBase.PropertySet()
        loc = dafPersist.LogicalLocation("tests/data/kernel1.boost")
        persistence = dafPersist.Persistence.getPersistence(pol)

        storageList = dafPersist.StorageList()
        storage = persistence.getPersistStorage("XmlStorage", loc)
        storageList.append(storage)
        persistence.persist(k, storageList, additionalData)

        storageList2 = dafPersist.StorageList()
        storage2 = persistence.getRetrieveStorage("XmlStorage", loc)
        storageList2.append(storage2)
        x = persistence.unsafeRetrieve("FixedKernel", storageList2,
                                       additionalData)
        k2 = afwMath.FixedKernel.swigConvert(x)

        self.kernelCheck(k, k2)

        outImage = afwImage.ImageD(k2.getDimensions())
        k2.computeImage(outImage, False)

        outArr = outImage.getArray().transpose()
        if not numpy.allclose(inArr, outArr):
            self.fail("%s = %s != %s (not normalized)" % \
                    (k2.__class__.__name__, inArr, outArr))
        normInArr = inArr / inArr.sum()
        normOutImage = afwImage.ImageD(k2.getDimensions())
        k2.computeImage(normOutImage, True)
        normOutArr = normOutImage.getArray().transpose()
        if not numpy.allclose(normOutArr, normInArr):
            self.fail("%s = %s != %s (normalized)" % \
                    (k2.__class__.__name__, normInArr, normOutArr))
Example #3
File: kernelIo1.py  Project: rnikutta/afw
    def testAnalyticKernel(self):
        """Test AnalyticKernel using a Gaussian function
        """
        kWidth = 5
        kHeight = 8

        pol = pexPolicy.Policy()
        additionalData = dafBase.PropertySet()
        loc = dafPersist.LogicalLocation("tests/data/kernel2.boost")
        persistence = dafPersist.Persistence.getPersistence(pol)

        gaussFunc = afwMath.GaussianFunction2D(1.0, 1.0, 0.0)
        k = afwMath.AnalyticKernel(kWidth, kHeight, gaussFunc)
        fArr = numpy.zeros(shape=[k.getWidth(), k.getHeight()], dtype=float)
        for xsigma in (0.1, 1.0, 3.0):
            for ysigma in (0.1, 1.0, 3.0):
                for angle in (0.0, 0.4, 1.1):
                    gaussFunc.setParameters((xsigma, ysigma, angle))
                    # compute array of function values and normalize
                    for row in range(k.getHeight()):
                        y = row - k.getCtrY()
                        for col in range(k.getWidth()):
                            x = col - k.getCtrX()
                            fArr[col, row] = gaussFunc(x, y)
                    fArr /= fArr.sum()
                    
                    k.setKernelParameters((xsigma, ysigma, angle))
    
                    storageList = dafPersist.StorageList()
                    storage = persistence.getPersistStorage("XmlStorage", loc)
                    storageList.append(storage)
                    persistence.persist(k, storageList, additionalData)
    
                    storageList2 = dafPersist.StorageList()
                    storage2 = persistence.getRetrieveStorage("XmlStorage", loc)
                    storageList2.append(storage2)
                    x = persistence.unsafeRetrieve("AnalyticKernel",
                            storageList2, additionalData)
                    k2 = afwMath.AnalyticKernel.swigConvert(x)
    
                    self.kernelCheck(k, k2)
    
                    kImage = afwImage.ImageD(k2.getDimensions())
                    k2.computeImage(kImage, True)
                    kArr = kImage.getArray().transpose()
                    if not numpy.allclose(fArr, kArr):
                        self.fail("%s = %s != %s for xsigma=%s, ysigma=%s" % \
                                (k2.__class__.__name__, kArr, fArr, xsigma, ysigma))
Example #4
    def testBoostPersistence(self):
        """Persist the image using boost"""
        with utilsTests.getTempFilePath(".boost") as boostFilePath:
            logicalLocation = dafPers.LogicalLocation(boostFilePath)
            storage = self.persistence.getPersistStorage("BoostStorage", logicalLocation)
            storageList = dafPers.StorageList([storage])
            self.persistence.persist(self.image, storageList, self.additionalData)

            # Retrieve it again
            storage = self.persistence.getRetrieveStorage("BoostStorage", logicalLocation)
            storageList = dafPers.StorageList([storage])
            pers2Ptr = self.persistence.unsafeRetrieve("ImageF", storageList, self.additionalData)
            image2 = afwImage.ImageF.swigConvert(pers2Ptr)
            
            # Check the resulting Image
            self.checkImages(self.image, image2)
Example #5
    def testFitsPersistence(self):
        """Test persisting to FITS"""

        # Set up the LogicalLocation.
        logicalLocation = dafPers.LogicalLocation(self.infile)

        # Create a FitsStorage and put it in a StorageList.
        storage = self.persistence.getRetrieveStorage("FitsStorage", logicalLocation)
        storageList = dafPers.StorageList([storage])

        # Let's do the retrieval!
        maskedImage2 = afwImage.MaskedImageF.swigConvert( \
            self.persistence.unsafeRetrieve("MaskedImageF", storageList, self.additionalData))

        # Check the resulting MaskedImage
        self.checkImages(self.maskedImage, maskedImage2)
Example #6
    def testFitsPersistence(self):
        """Test unpersisting from FITS"""

        # Set up the LogicalLocation.
        logicalLocation = dafPers.LogicalLocation(
            os.path.join("tests", "data", "HSC-0908120-056-small.fits"))

        # Create a FitsStorage and put it in a StorageList.
        storage = self.persistence.getRetrieveStorage("FitsStorage",
                                                      logicalLocation)
        storageList = dafPers.StorageList([storage])

        # Let's do the retrieval!
        propertyList = self.persistence.unsafeRetrieve("PropertyList",
                                                       storageList, None)

        self.assertEqual(propertyList.get("AR_HDU"), 5)
Example #7
    def testDeltaFunctionKernel(self):
        """Test DeltaFunctionKernel
        """
        pol = pexPolicy.Policy()
        additionalData = dafBase.PropertySet()
        loc = dafPersist.LogicalLocation(
            os.path.join(testPath, "data", "kernel3.boost"))
        persistence = dafPersist.Persistence.getPersistence(pol)

        for kWidth in range(1, 4):
            for kHeight in range(1, 4):
                for activeCol in range(kWidth):
                    for activeRow in range(kHeight):
                        kernel = afwMath.DeltaFunctionKernel(
                            kWidth, kHeight,
                            afwGeom.Point2I(activeCol, activeRow))

                        storageList = dafPersist.StorageList()
                        storage = persistence.getPersistStorage(
                            "XmlStorage", loc)
                        storageList.append(storage)
                        persistence.persist(kernel, storageList,
                                            additionalData)

                        storageList2 = dafPersist.StorageList()
                        storage2 = persistence.getRetrieveStorage(
                            "XmlStorage", loc)
                        storageList2.append(storage2)
                        x = persistence.unsafeRetrieve("DeltaFunctionKernel",
                                                       storageList2,
                                                       additionalData)
                        k2 = afwMath.DeltaFunctionKernel.swigConvert(x)

                        self.kernelCheck(kernel, k2)
                        self.assertEqual(kernel.getPixel(), k2.getPixel())

                        kImage = afwImage.ImageD(k2.getDimensions())
                        kSum = k2.computeImage(kImage, False)
                        self.assertEqual(kSum, 1.0)
                        kArr = kImage.getArray().transpose()
                        self.assertEqual(kArr[activeCol, activeRow], 1.0)
                        kArr[activeCol, activeRow] = 0.0
                        self.assertEqual(kArr.sum(), 0.0)
Example #8
    def testBoostPersistenceU16(self):
        """Persist a U16 image using boost"""
        with utilsTests.getTempFilePath(".boost") as boostFilePath:
            logicalLocation = dafPers.LogicalLocation(boostFilePath)
            storage = self.persistence.getPersistStorage("BoostStorage", logicalLocation)
            storageList = dafPers.StorageList([storage])
            #
            # Read a U16 image
            #
            self.image = self.image.Factory(os.path.join(dataDir, "data", "small_MI.fits"))
            self.persistence.persist(self.image, storageList, self.additionalData)

            # Retrieve it again
            storage = self.persistence.getRetrieveStorage("BoostStorage", logicalLocation)
            storageList = dafPers.StorageList([storage])
            pers2Ptr = self.persistence.unsafeRetrieve("ImageF", storageList, self.additionalData)
            image2 = afwImage.ImageF.swigConvert(pers2Ptr)

            # Check the resulting Image
            self.checkImages(self.image, image2)
Example #9
def unpersistPsf(xmlPath):
    """Read a PSF from an XML file"""
    # Set up persistence object
    pol = pexPolicy.Policy()
    persistence = dafPersist.Persistence.getPersistence(pol)

    # Where is the file on disk? Make a storage object
    loc = dafPersist.LogicalLocation(xmlPath)
    storageList = dafPersist.StorageList()
    storage = persistence.getRetrieveStorage('XmlStorage', loc)
    storageList.append(storage)

    # Capture any associated metadata
    metadata = dafBase.PropertySet()

    # Unpersist the object; you need to say which object in storage to grab
    persistable = persistence.retrieve('pcaPsf', storageList, metadata)

    # Cast to a PSF model
    psf = measAlg.PSF_swigConvert(persistable)
    return psf
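A minimal usage sketch for the helper above; the file name and the getKernel() accessor are assumptions for illustration, not taken from the source.

# Hypothetical usage of unpersistPsf: read a PSF persisted to XmlStorage and
# report the size of its kernel ("pcaPsf.xml" is a made-up path).
psf = unpersistPsf("pcaPsf.xml")
kernel = psf.getKernel()  # assumed accessor on the unpersisted PSF model
print("PSF kernel is %d x %d pixels" % (kernel.getWidth(), kernel.getHeight()))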
Example #10
    def testBoostPersistence(self):
        """Persist the image using boost"""
        boostFilePath = "image.boost"
        logicalLocation = dafPers.LogicalLocation(boostFilePath)
        storage = self.persistence.getPersistStorage("BoostStorage",
                                                     logicalLocation)
        storageList = dafPers.StorageList([storage])
        self.persistence.persist(self.image, storageList, self.additionalData)

        # Retrieve it again
        storage = self.persistence.getRetrieveStorage("BoostStorage",
                                                      logicalLocation)
        storageList = dafPers.StorageList([storage])
        pers2Ptr = self.persistence.unsafeRetrieve("ImageF", storageList,
                                                   self.additionalData)
        image2 = afwImage.ImageF.swigConvert(pers2Ptr)

        # Check the resulting Image
        self.checkImages(self.image, image2)

        # Delete the boost-persisted image
        os.remove(boostFilePath)
Example #11
    def test3(self):
        """
        Test PropertySet persistence to database with policy mapping itemName to
        database table name and mapping property keys to table columns.
        """

        dp = dafBase.PropertySet()
        dp.addInt("i", 3)
        dp.addDouble("d", 3.4)
        dp.addString("v", "LastOne")
        dp.addBool("b", True)
        dp.addLongLong("I", 9998887776)
        dp.addFloat("f", 1.414)

        pol = pexPolicy.Policy()
        itemPol = pexPolicy.Policy()
        itemPol.set("TableName", "Persistence_Test_2")
        itemPol.add("KeyList", "floatField=f")
        itemPol.add("KeyList", "int64Field=I")
        itemPol.add("KeyList", "boolField=b")
        itemPol.add("KeyList", "varcharField=v")
        itemPol.add("KeyList", "doubleField=d")
        itemPol.add("KeyList", "intField=i")
        pol.set("Formatter.PropertySet.testItem", itemPol)

        additionalData = dafBase.PropertySet()
        additionalData.add("itemName", "testItem")

        loc = dafPersist.LogicalLocation("mysql://{}:{}/test".format(HOST, PORT))

        persistence = dafPersist.Persistence.getPersistence(pol)

        storageList = dafPersist.StorageList()
        storage = persistence.getPersistStorage("DbStorage", loc)
        storageList.append(storage)
        persistence.persist(dp, storageList, additionalData)
Example #12
    def testSVLinearCombinationKernel(self):
        """Test a spatially varying LinearCombinationKernel
        """
        kWidth = 3
        kHeight = 2

        pol = pexPolicy.Policy()
        additionalData = dafBase.PropertySet()
        loc = dafPersist.LogicalLocation(
            os.path.join(testPath, "data", "kernel6.boost"))
        persistence = dafPersist.Persistence.getPersistence(pol)

        # create image arrays for the basis kernels
        basisImArrList = []
        imArr = np.zeros((kWidth, kHeight), dtype=float)
        imArr += 0.1
        imArr[kWidth // 2, :] = 0.9
        basisImArrList.append(imArr)
        imArr = np.zeros((kWidth, kHeight), dtype=float)
        imArr += 0.2
        imArr[:, kHeight // 2] = 0.8
        basisImArrList.append(imArr)

        # create a list of basis kernels from the images
        kVec = []
        for basisImArr in basisImArrList:
            basisImage = afwImage.makeImageFromArray(
                basisImArr.transpose().copy())
            kernel = afwMath.FixedKernel(basisImage)
            kVec.append(kernel)

        # create spatially varying linear combination kernel
        spFunc = afwMath.PolynomialFunction2D(1)

        # spatial parameters are a list of entries, one per kernel parameter;
        # each entry is a list of spatial parameters
        sParams = (
            (0.0, 1.0, 0.0),
            (0.0, 0.0, 1.0),
        )

        k = afwMath.LinearCombinationKernel(kVec, spFunc)
        k.setSpatialParameters(sParams)

        storageList = dafPersist.StorageList()
        storage = persistence.getPersistStorage("XmlStorage", loc)
        storageList.append(storage)
        persistence.persist(k, storageList, additionalData)

        storageList2 = dafPersist.StorageList()
        storage2 = persistence.getRetrieveStorage("XmlStorage", loc)
        storageList2.append(storage2)
        k2 = persistence.unsafeRetrieve("LinearCombinationKernel",
                                        storageList2, additionalData)

        self.kernelCheck(k, k2)

        kImage = afwImage.ImageD(afwGeom.Extent2I(kWidth, kHeight))
        for colPos, rowPos, coeff0, coeff1 in [
            (0.0, 0.0, 0.0, 0.0),
            (1.0, 0.0, 1.0, 0.0),
            (0.0, 1.0, 0.0, 1.0),
            (1.0, 1.0, 1.0, 1.0),
            (0.5, 0.5, 0.5, 0.5),
        ]:
            k2.computeImage(kImage, False, colPos, rowPos)
            kImArr = kImage.getArray().transpose()
            refKImArr = (basisImArrList[0] * coeff0) + \
                (basisImArrList[1] * coeff1)
            if not np.allclose(kImArr, refKImArr):
                self.fail(
                    "%s = %s != %s at colPos=%s, rowPos=%s" %
                    (k2.__class__.__name__, kImArr, refKImArr, colPos, rowPos))
Example #13
 def _massagePolicy(self):
     loc = persistence.LogicalLocation(self._policy.get('filterTableLocation'))
     self._policy.set('filterTableLocation', loc.locString())
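The one-liner above converts a policy value into its fully expanded location string. LogicalLocation can also substitute %(key) fields from a PropertySet of additional data, which is the mechanism Example #18 uses when it builds logLoc. A minimal sketch of that substitution, assuming the %(key) template syntax and a hypothetical "visit" key:

import lsst.daf.base as dafBase
import lsst.daf.persistence as dafPersist

# Sketch only: expand a %(visit)d field from additional data (key name assumed).
additionalData = dafBase.PropertySet()
additionalData.set("visit", 1234)
loc = dafPersist.LogicalLocation("calexp-v%(visit)d.fits", additionalData)
print(loc.locString())  # expected: calexp-v1234.fits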
Example #14
    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description: `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry if
            this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
        # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix registry.
        registry = None

        def search(filename, description):
            """Search for file in storage
            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry
Example #15
    def __init__(self, root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.
        Parameters
        ----------
        policy : daf_persistence.Policy,
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """
        policyFile = Policy.defaultPolicyFile("obs_hsc_sims", "HscSimsMapper.yaml", "policy")
        policy = Policy(policyFile)

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("HscSimsMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Don't load the default policy from obs_base
        # defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
        #                                                        "MapperDictionary.paf",
        #                                                        "policy")
        # dictPolicy = dafPersist.Policy(defaultPolicyFile)
        # policy.merge(dictPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=True)
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        # #self.cameraDataLocation = None  # path to camera geometry config file
        # #self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
        # camera package, which is on the local filesystem.
        # #self.defectRegistry = None
        # #if 'defects' in policy:
        # #    self.defectPath = os.path.join(repositoryDir, policy['defects'])
        # #    defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
        # #    self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None
Example #16
def fetchOrbitIdsAndEphems(dbLogicalLocation,
                           sliceId,
                           numSlices,
                           mjd,
                           deltaMJD=1.):
    """
    Fetch the orbit Id of all known moving objects from day-MOPS together with
    their precomputed ephemerides at int(mjd)-deltaMJD, int(mjd) and
    int(mjd)+deltaMJD.
    
    @param dbLogicalLocation: pointer to the DB.
    @param sliceId: slice ID.
    @param numSlices: total number of slices.
    @param mjd: MJD of the exposure (UTC).
    @param deltaMJD: temporal distance between successive ephemerides.

    Return
        [(internal_orbitId: Ephemeris obj), ] sorted by mjd
    """
    # Init the persistence middleware.
    if (RIDICOLOUSLY_VERBOSE):
        t3 = time.time()
    db = dafPer.DbStorage()

    # Connect to the DB.
    loc = dafPer.LogicalLocation(dbLogicalLocation)
    db.setRetrieveLocation(loc)
    if (RIDICOLOUSLY_VERBOSE):
        logit('     %.02fs: connect to DB' % (time.time() - t3))

    # Prepare the query.
    if (RIDICOLOUSLY_VERBOSE):
        t3 = time.time()
    deltaMJD = abs(deltaMJD)
    mjdMin = mjd - deltaMJD
    mjdMax = mjd + deltaMJD

    # TODO: handle different MovingObject versions. Meaning choose the highest
    # version. Not needed for DC3a.
    where = 'mjd >= %f and mjd <= %f' % (mjdMin, mjdMax)
    # Poor man's parallelism ;-)
    # if(numSlices > 0):
    if (numSlices > 1):
        where += ' and movingObjectId %% %d = %d' % (numSlices, sliceId)

    db.startTransaction()
    db.setTableForQuery('_tmpl_mops_Ephemeris')
    db.setQueryWhere(where)
    db.outColumn('movingObjectId')
    db.outColumn('movingObjectVersion')
    db.outColumn('mjd')
    db.outColumn('ra')
    db.outColumn('decl')
    db.outColumn('mag')
    db.outColumn('smaa')
    db.outColumn('smia')
    db.outColumn('pa')
    db.orderBy('movingObjectId')
    db.orderBy('mjd')
    if (RIDICOLOUSLY_VERBOSE):
        logit('     %.02fs: prepare SQL' % (time.time() - t3))

    # Execute the query.
    if (RIDICOLOUSLY_VERBOSE):
        t3 = time.time()
    db.query()
    if (RIDICOLOUSLY_VERBOSE):
        logit('     %.02fs: exec SQL' % (time.time() - t3))

    # Fetch the results.
    if (RIDICOLOUSLY_VERBOSE):
        t0 = time.time()
        t1 = 0
        t2 = 0
    res = []
    while db.next():
        if (RIDICOLOUSLY_VERBOSE):
            tt2 = time.time()
        movingObjectId = db.getColumnByPosInt64(0)
        movingObjectVersion = db.getColumnByPosInt64(1)
        ephem = (
            movingObjectId,  # movingObjectId
            movingObjectVersion,  # movingObjectVersion
            db.getColumnByPosDouble(2),  # mjd
            db.getColumnByPosDouble(3),  # ra
            db.getColumnByPosDouble(4),  # decl
            db.getColumnByPosDouble(5),  # mag
            db.getColumnByPosDouble(6),  # smaa
            db.getColumnByPosDouble(7),  # smia
            db.getColumnByPosDouble(8))  # pa
        if (RIDICOLOUSLY_VERBOSE):
            t2 += time.time() - tt2
        # We now create a new temp id made by concatenating the movingobject id
        # and its version. It will only be used internally.
        # res= [(new_orbit_id, Ephemeris obj), ...]
        if (RIDICOLOUSLY_VERBOSE):
            tt1 = time.time()
        res.append(('%d-%d' % (movingObjectId, movingObjectVersion), ephem))
        if (RIDICOLOUSLY_VERBOSE):
            t1 += time.time() - tt1

    # We are done with the query.
    db.finishQuery()
    if (RIDICOLOUSLY_VERBOSE):
        logit('     %.02fs: fetch res' % (time.time() - t0 - t1 - t2))
        logit('     %.02fs: Ephemeris()' % (t2))
        logit('     %.02fs: results.append()' % (t1))
    return (res)
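A hypothetical invocation of the function above; the database location, slice layout, and MJD are made-up values, not taken from the source.

# Sketch only: fetch ephemerides for slice 0 of 4 around a made-up MJD.
results = fetchOrbitIdsAndEphems('mysql://localhost:3306/mops_test',
                                 sliceId=0,
                                 numSlices=4,
                                 mjd=53375.0,
                                 deltaMJD=1.)
for orbitId, ephem in results:
    # ephem is the 9-tuple built above:
    # (movingObjectId, movingObjectVersion, mjd, ra, decl, mag, smaa, smia, pa)
    print(orbitId, ephem[2], ephem[3], ephem[4])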
Example #17
#!/usr/bin/env python
import sys
import lsst.daf.persistence as persistence


if len(sys.argv) != 2:
    sys.stderr.write('usage: cleanall.py <database name>\n')
    sys.stderr.flush()
    sys.exit(1)
database = sys.argv[1]

db = persistence.DbStorage()
db.setRetrieveLocation(persistence.LogicalLocation('mysql://localhost:3306/%s' %(database)))
db.startTransaction()
db.executeSql('delete from DIASourceIDTonight')
db.executeSql('delete from mops_Tracklet')
db.executeSql('delete from mops_TrackletsToDIASource')
db.executeSql('delete from mops_TracksToTracklet')
db.endTransaction()
sys.exit(0)
Example #18
def _output(stage, policy, clipboard, log):
    """Perform the actual persistence.
    
    @param stage     The stage requesting output.
    @param policy    The policy for the stage.
    @param clipboard The clipboard for the stage.  The persisted objects are taken from this.
    @param log       A logger for messages.
    """

    if not policy.exists('parameters.outputItems'):
        # Propagate the clipboard to the output queue, but otherwise
        # do nothing.
        log.log(Log.WARN, "No outputItems found")
        return

    mainAdditionalData = lsst.pex.harness.Utils.createAdditionalData(
        stage, policy, clipboard)

    # Create a persistence object using policy, if present.
    if policy.exists('parameters.persistence'):
        persistencePolicy = pexPolicy.Policy(
            policy.getPolicy('parameters.persistence'))
    else:
        persistencePolicy = pexPolicy.Policy()
    persistence = dafPersist.Persistence.getPersistence(persistencePolicy)

    # Iterate over items in OutputItems policy.
    outputPolicy = policy.getPolicy('parameters.outputItems')
    itemNames = outputPolicy.policyNames(True)
    somethingWasOutput = False

    for item in itemNames:

        additionalData = mainAdditionalData.deepCopy()

        itemPolicy = outputPolicy.getPolicy(item)

        # Skip the item if it is not required and is not present.
        itemRequired = itemPolicy.exists('required') and \
                itemPolicy.getBool('required')
        if not clipboard.contains(item):
            if itemRequired:
                raise RuntimeError('Missing output item: ' + item)
            else:
                continue

        itemData = clipboard.get(item)

        # Add the item name to the additionalData.
        additionalData.set('itemName', item)

        if itemPolicy.exists('datasetId'):
            dsPolicy = itemPolicy.getPolicy('datasetId')
            ds = Dataset(dsPolicy.get('datasetType'))
            ds.ids = {}
            if dsPolicy.exists('set'):
                setPolicy = dsPolicy.getPolicy('set')
                for param in setPolicy.paramNames():
                    ds.ids[param] = setPolicy.get(param)
                    additionalData.set(param, setPolicy.get(param))
            if dsPolicy.exists('fromJobIdentity'):
                jobIdentity = clipboard.get(
                    policy.get('inputKeys.jobIdentity'))
                for id in dsPolicy.getStringArray('fromJobIdentity'):
                    ds.ids[id] = jobIdentity[id]
                    additionalData.set(id, jobIdentity[id])
            outputKey = policy.get('outputKeys.outputDatasets')
            dsList = clipboard.get(outputKey)
            if dsList is None:
                dsList = []
                clipboard.put(outputKey, dsList)
            dsList.append(ds)
            if stage.butler is not None:
                # Use the butler to figure out storage and locations.
                # Write Using Butler
                iolog = BlockTimingLog(log, "write_using_butler", Log.INFO - 1)
                iolog.start("persisting %s as %s with keys %s" %
                            (item, ds.type, ds.ids))
                stage.butler.put(itemData, ds.type, dataId=ds.ids)
                iolog.done()
                somethingWasOutput = True
                continue

        # Get the item's StoragePolicy.
        if itemPolicy.isArray('storagePolicy'):
            policyList = itemPolicy.getPolicyArray('storagePolicy')
        else:
            policyList = []
            policyList.append(itemPolicy.getPolicy('storagePolicy'))

        # Create a list of Storages for the item based on policy.
        storageList = dafPersist.StorageList()
        for storagePolicy in policyList:
            storageName = storagePolicy.getString('storage')
            location = storagePolicy.getString('location')
            logLoc = dafPersist.LogicalLocation(location, additionalData)
            log.log(Log.INFO - 1,
                    "persisting %s to %s" % (item, logLoc.locString()))
            additionalData.add('StorageLocation.' + storageName,
                               logLoc.locString())
            mainAdditionalData.add('StorageLocation.' + storageName,
                                   logLoc.locString())
            storage = persistence.getPersistStorage(storageName, logLoc)
            storageList.append(storage)

        # Persist the item.

        # Write Without Butler
        iolog = BlockTimingLog(log, "write_without_butler", Log.INFO - 1)
        iolog.start("persisting %s" % (item, ))
        if hasattr(itemData, '__deref__'):
            persistence.persist(itemData.__deref__(), storageList,
                                additionalData)
        else:
            persistence.persist(itemData, storageList, additionalData)
        iolog.done()
        somethingWasOutput = True

    if not somethingWasOutput:
        log.log(Log.WARN, "No items were output")