Beispiel #1
0
 def retrieveGroups(self, request):
     """Read every dataset under each requested group.

     Opens the request's file read-only under a lock, reads all datasets
     directly inside each group named in the request, and returns a
     RetrieveResponse carrying one storage record per dataset.  The file
     close time is recorded in timeMap['closeFile'].
     """
     filename = request.getFilename()
     hdf5File, lock = self.__openFile(filename, 'r')
     try:
         req = request.getRequest()
         records = []
         root = hdf5File['/']
         for groupName in request.getGroups():
             groupNode = self.__getNode(root, groupName)
             for dsName in groupNode.keys():
                 dsNode = groupNode[dsName]
                 raw = HDF5OpManager.read(dsNode, req)
                 records.append(
                     DataStoreFactory.createStorageRecord(raw, dsNode, req))
         response = RetrieveResponse()
         response.setRecords(records)
         return response
     finally:
         # Always close and unlock, even when a read fails.
         start = time.time()
         hdf5File.close()
         timeMap['closeFile'] = time.time() - start
         LockManager.releaseLock(lock)
Beispiel #2
0
 def __removeFile(self, path):
     """Delete the file at *path* while holding an append-mode lock.

     Raises StorageException when the lock cannot be acquired; the lock
     is released only if it was actually obtained.
     """
     acquired = False
     try:
         acquired, lock = LockManager.getLock(path, 'a')
         if not acquired:
             raise StorageException('Unable to acquire lock on file ' +
                                    path + ' for deleting')
         os.remove(path)
     finally:
         if acquired:
             LockManager.releaseLock(lock)
Beispiel #3
0
 def getDatasets(self, request):
     """Return the names of the datasets directly under the requested group."""
     filename = request.getFilename()
     hdf5File, lock = self.__openFile(filename, 'r')
     try:
         groupNode = self.__getNode(hdf5File['/'], request.getGroup())
         return groupNode.keys()
     finally:
         # Record how long the close took, then release the lock.
         start = time.time()
         hdf5File.close()
         timeMap['closeFile'] = time.time() - start
         LockManager.releaseLock(lock)
Beispiel #4
0
 def __removeDir(self, path, onlyIfEmpty=False):
     """Delete the directory at *path* while holding an append-mode lock.

     onlyIfEmpty: when True, use os.rmdir (which fails on a non-empty
     directory); otherwise remove the whole tree with shutil.rmtree.
     Raises StorageException when the lock cannot be acquired.
     """
     acquired = False
     try:
         acquired, lock = LockManager.getLock(path, 'a')
         if not acquired:
             raise StorageException('Unable to acquire lock on file ' +
                                    path + ' for deleting')
         if onlyIfEmpty:
             os.rmdir(path)
         else:
             shutil.rmtree(path)
     finally:
         if acquired:
             LockManager.releaseLock(lock)
Beispiel #5
0
 def __openFile(self, filename, mode='r'):
     """Lock and open *filename* with h5py; return (file, lockDescriptor).

     Mode 'r' requires the file to already exist.  Mode 'w' is silently
     downgraded to 'a' when the file exists, so existing data is never
     truncated.  Raises StorageException when the file is missing (read
     mode) or the lock cannot be acquired.  If h5py fails to open the
     file, the lock is released before the exception propagates.
     """
     if mode == 'r' and not os.path.exists(filename):
         raise StorageException('File ' + filename + ' does not exist')
     gotLock, fd = LockManager.getLock(filename, mode)
     if not gotLock:
         raise StorageException('Unable to acquire lock on file ' +
                                filename)
     try:
         # Never truncate an existing file: open for append instead.
         if mode == 'w' and os.path.exists(filename):
             mode = 'a'
         f = h5py.File(filename, mode)
     except Exception:
         msg = "Unable to open file " + filename + ": " + IDataStore._exc()
         logger.error(msg)
         LockManager.releaseLock(fd)
         # Bare raise keeps the original traceback (was "raise e").
         raise
     # BUG FIX: the opened file and its lock were never returned, yet
     # every caller unpacks "f, lock = self.__openFile(...)".
     return f, fd
Beispiel #6
0
    def retrieveDatasets(self, request):
        """Read several datasets addressed by full group/dataset paths.

        Returns a RetrieveResponse whose records hold the data of every
        path in request.getDatasetGroupPath().  Heavily instrumented:
        per-dataset read times, lock-acquisition time, and release time
        are emitted in a single DEBUG log line.
        """
        t6 = time.time()  # start of the whole call, for the total time
        fn = request.getFilename()
        t0 = time.time()
        f, lock = self.__openFile(fn, 'r')  # read lock + open
        t1 = time.time()  # t1 - t0 = time spent acquiring lock and opening
        t2 = 0
        t3 = 0
        names = []
        paramMap = {}  # dataset name -> per-read seconds, '%.3f' formatted
        timeSpentReading = 0
        try:
            names = request.getDatasetGroupPath()
            req = request.getRequest()
            result = []
            rootNode = f['/']
            for dsName in names:
                ds = self.__getNode(rootNode, None, dsName)
                t2 = time.time()
                rawData = HDF5OpManager.read(ds, req)
                t3 = time.time()
                diff = t3 - t2  # read time for this one dataset
                timeSpentReading += diff
                paramMap[dsName] = ('%.3f' % (diff))
                rec = DataStoreFactory.createStorageRecord(rawData, ds, req)
                result.append(rec)

            resp = RetrieveResponse()
            resp.setRecords(result)
            return resp
        finally:
            # Always close and unlock, then log the timing breakdown.
            f.close()
            t4 = time.time()
            LockManager.releaseLock(lock)
            t5 = time.time()
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug("pid=" + str(os.getpid()) + " filename=" + fn +
                             ", numberOfDatasets/Parameters=" +
                             str(len(names)) + ", getLockTime=" +
                             ('%.3f' % (t1 - t0)) + ", readDataTime=" +
                             ('%.3f' % (timeSpentReading)) +
                             ", releaseLockTime=" + ('%.3f' % (t5 - t4)) +
                             ", retrieveDatasetsTotal=" + ('%.3f' %
                                                           (t4 - t6)) +
                             ", perParamRead=" + str(paramMap))
Beispiel #7
0
    def createDataset(self, request):
        """Create (without populating) the dataset described by the request.

        Validates the record's storage properties, creates the group path
        and dataset, writes the record's properties as attributes, and
        returns a StoreResponse.  Raises StorageException when compression
        is requested without chunking (HDF5 can only compress chunked
        datasets).
        """
        fn = request.getFilename()
        f, lock = self.__openFile(fn, 'w')
        try:
            rec = request.getRecord()
            props = rec.getProps()
            # BUG FIX: getCompression was compared without being called,
            # so the bound method never equalled 'NONE' and this check
            # rejected every unchunked record regardless of compression.
            if (props and not props.getChunked()
                    and props.getCompression() != 'NONE'):
                raise StorageException("Data must be chunked to be compressed")
            grp = rec.getGroup()
            group = self.__getNode(f['/'], grp, None, create=True)

            # reverse sizes for hdf5 (slowest-varying dimension first)
            szDims = tuple(self.__reverseDimensions(rec.getSizes()))

            chunks = None
            if props and props.getChunked():
                chunks = (DEFAULT_CHUNK_SIZE, ) * len(szDims)

            compression = None
            if props:
                compression = props.getCompression()

            dtype = self.__getHdf5Datatype(rec)
            datasetName = rec.getName()
            fillValue = rec.getFillValue()
            # Max dims equal the initial dims, i.e. the dataset is created
            # at its full (non-extendable) size.
            ds = self.__createDatasetInternal(group, datasetName, dtype,
                                              szDims, szDims, chunks,
                                              compression, fillValue)
            self.__writeProperties(rec, ds)
            f.flush()
            return StoreResponse()
        finally:
            t0 = time.time()
            f.close()
            t1 = time.time()
            timeMap['closeFile'] = t1 - t0
            LockManager.releaseLock(lock)
Beispiel #8
0
    def store(self, request):
        """Write every record in the request into its HDF5 file.

        Records that fail to store are stripped of their data payload and
        returned in the response together with the exception text, so the
        caller can tell which stores succeeded.  Timing is recorded in
        timeMap['store'] and timeMap['closeFile'].
        """
        fn = request.getFilename()
        recs = request.getRecords()
        self.__prepareRecordsToStore(recs)
        f, lock = self.__openFile(fn, 'w')
        try:
            op = request.getOp()
            status = StorageStatus()
            exc = []
            failRecs = []
            ss = None
            t0 = time.time()
            for r in recs:
                try:
                    ss = self.__writeHDF(f, r, op)
                # BUG FIX: was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit.  Catch Exception only;
                # best-effort per-record handling is otherwise unchanged.
                except Exception:
                    logger.warn("Exception occurred on file " + fn + ":" +
                                IDataStore._exc())
                    exc.append(IDataStore._exc())
                    # Clear out data so we don't send the whole thing back to the client.
                    # NOTE: This assumes pypies no longer needs the data
                    r.putDataObject(None)
                    failRecs.append(r)

            # Report the outcome of the last successful write.
            if ss:
                status.setOperationPerformed(ss['op'])
                if 'index' in ss:
                    status.setIndexOfAppend(ss['index'])
            t1 = time.time()
            timeMap['store'] = t1 - t0
            resp = StoreResponse()
            resp.setStatus(status)
            resp.setExceptions(exc)
            resp.setFailedRecords(failRecs)
            return resp
        finally:
            t0 = time.time()
            f.close()
            t1 = time.time()
            timeMap['closeFile'] = t1 - t0
            LockManager.releaseLock(lock)
Beispiel #9
0
 def retrieve(self, request):
     """Retrieve a whole group, or a single dataset when a read request
     is supplied.

     With a request object, only the named dataset is read; otherwise
     every dataset reachable from the group node is retrieved.  Returns
     a RetrieveResponse carrying the resulting records.
     """
     filename = request.getFilename()
     hdf5File, lock = self.__openFile(filename, 'r')
     try:
         req = request.getRequest()
         root = hdf5File['/']
         groupName = request.getGroup()
         if req:
             dsNode = self.__getNode(root, groupName, request.getDataset())
             records = [self.__retrieveInternal(dsNode, req)]
         else:
             records = self.__retrieve(self.__getNode(root, groupName))
         response = RetrieveResponse()
         response.setRecords(records)
         return response
     finally:
         # Always close and unlock, tracking the close time.
         start = time.time()
         hdf5File.close()
         timeMap['closeFile'] = time.time() - start
         LockManager.releaseLock(lock)
Beispiel #10
0
                             str(fn) + ']: ' + IDataStore._exc())

            t0 = time.time()
            f.close()
            t1 = time.time()
            timeMap['closeFile'] = t1 - t0

            if deleteFile:
                logger.info('Removing empty file [' + str(fn) + ']')
                try:
                    os.remove(fn)
                except Exception, e:
                    logger.error('Error occurred deleting file [' + str(fn) +
                                 ']: ' + IDataStore._exc())

            LockManager.releaseLock(lock)
        return resp

    # recursively looks for data sets
    def __hasDataSet(self, group):
        """Return True if *group* contains a dataset at any depth."""
        for key in group:
            child = group[key]
            # Use isinstance rather than exact type() comparison so that
            # subclasses of Dataset/Group are recognised as well.
            if isinstance(child, h5py.highlevel.Dataset):
                return True
            if isinstance(child, h5py.highlevel.Group):
                if self.__hasDataSet(child):
                    return True
        return False

    def retrieve(self, request):
        fn = request.getFilename()