Example #1
def test_non_unique_fail():
    shape = (25, 32)
    fb = insert_resource('syn-mod', None, {'shape': shape})
    r_id = str(uuid.uuid4())
    insert_datum(str(fb['id']), r_id, {'n': 0})
    # Re-using the same datum id must fail: datum ids are unique in filestore.
    with pytest.raises(pymongo.errors.DuplicateKeyError):
        insert_datum(str(fb['id']), r_id, {'n': 1})
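
The DuplicateKeyError in this test comes from MongoDB itself, via a unique index on the datum id. A minimal sketch of that behaviour using bare pymongo; the collection and field names are illustrative assumptions, not filestore's actual schema, and a running mongod is required:

import uuid

import pymongo

client = pymongo.MongoClient()
datum_col = client['demo_db']['datum']          # assumed database/collection names
datum_col.create_index('datum_id', unique=True)

d_id = str(uuid.uuid4())
datum_col.insert_one({'datum_id': d_id, 'datum_kwargs': {'n': 0}})
try:
    # A second insert with the same datum_id violates the unique index.
    datum_col.insert_one({'datum_id': d_id, 'datum_kwargs': {'n': 1}})
except pymongo.errors.DuplicateKeyError:
    print('duplicate datum_id rejected')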
Example #2
    def _make_data(self):
        self.th = np.linspace(0, 2*np.pi, self.n_pts)
        self.scale = np.arange(self.N*self.M)

        with h5py.File(self.filename, 'w') as f:
            # create a group for maps to hold the data
            mapsGrp = f.create_group('MAPS')
            # now set a comment
            mapsGrp.attrs['comments'] = 'MAPS group'

            entryname = 'mca_arr'
            comment = 'These are raw spectrum data.'
            sn = np.sin(self.th).reshape(self.n_pts, 1, 1)
            XY = self.scale.reshape(1, self.N, self.M)
            data = XY * sn
            ds_data = mapsGrp.create_dataset(entryname, data=data)
            ds_data.attrs['comments'] = comment

        # insert spectrum-wise resource and datum
        resource_id = insert_resource('hdf5_maps', self.filename,
                                      {'dset_path': 'mca_arr'})
        self.eids_spectrum = [str(uuid.uuid4()) for j in range(self.N*self.M)]

        for uid, (i, j) in zip(self.eids_spectrum,
                               product(range(self.N), range(self.M))):
            insert_datum(resource_id, uid, {'x': i, 'y': j})

        # insert plane-wise resource and datum
        resource_id = insert_resource('hdf5_planes', self.filename,
                                      {'dset_path': 'mca_arr'})
        self.eids_planes = [str(uuid.uuid4()) for j in range(self.n_pts)]

        for uid, n in zip(self.eids_planes, range(self.n_pts)):
            insert_datum(resource_id, uid, {'e_index': n})
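
The snippet above only writes and registers the data; it does not show how the datums are read back. A minimal handler sketch for the 'hdf5_maps' spec, assuming filestore's usual handler protocol (resource_kwargs go to __init__, datum_kwargs to __call__); the class name and the register_handler import path are assumptions:

import h5py
from filestore.api import register_handler  # assumed import path

class HDF5MapsHandler:
    def __init__(self, filename, dset_path):
        self._file = h5py.File(filename, 'r')
        self._dset = self._file[dset_path]

    def __call__(self, x, y):
        # One spectrum per (x, y) map position, matching the datum_kwargs above.
        return self._dset[:, x, y]

register_handler('hdf5_maps', HDF5MapsHandler)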
Example #3
    def collect(self):
        """
        Record a 'datum' document in the filestore database for each encoder.

        Return a dictionary with references to these documents.
        """
        print('collect', self.name)
        self._ready_to_collect = False

        now = ttime.time()
        ttime.sleep(1)  # wait for the file to be written by the pizza box

        # Count the lines in the text file; one datum (and one Event document)
        # is emitted per chunk of lines.
        with open(self._full_path, 'r') as f:
            linecount = sum(1 for _ in f)

        # Ceiling division: add one chunk for any leftover partial chunk.
        chunk_count = linecount // self.chunk_size + int(
            linecount % self.chunk_size != 0)
        for chunk_num in range(chunk_count):
            datum_uid = str(uuid.uuid4())
            data = {self.name: datum_uid}
            fs.insert_datum(self.resource_uid, datum_uid,
                            {'chunk_num': chunk_num})
            yield {
                'data': data,
                'timestamps': {key: now
                               for key in data},
                'time': now
            }
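
The chunk_count expression is just a ceiling division; an equivalent standalone form, shown purely for illustration:

import math

def count_chunks(linecount, chunk_size):
    # Same result as: linecount // chunk_size + int(linecount % chunk_size != 0)
    return math.ceil(linecount / chunk_size)

assert count_chunks(10, 4) == 3   # 2 full chunks + 1 partial chunk
assert count_chunks(8, 4) == 2    # exact multiple, no extra chunk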
Example #4
def _insert_syn_data(f_type, shape, count):
    fb = insert_resource(f_type, None, {'shape': shape})
    ret = []
    for k in range(count):
        r_id = str(uuid.uuid4())
        insert_datum(str(fb.id), r_id, {'n': k + 1})
        ret.append(r_id)
    return ret
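
A possible way to use this helper and read the synthetic frames back, assuming a handler is registered for the 'syn-mod' spec and that retrieve() is exposed by the same filestore API (both are assumptions here):

from filestore.api import retrieve  # assumed import path

datum_ids = _insert_syn_data('syn-mod', (25, 32), 3)
# Each datum id resolves to one synthetic frame via the registered handler.
frames = [retrieve(d_id) for d_id in datum_ids]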
Example #5
    def unstage(self):
        "Insert all datums at the end."
        for readings in self._datum_uids.values():
            for reading in readings:
                uid = reading['value']
                kwargs = self._datum_kwargs_map[uid]
                fs.insert_datum(self._resource, uid, kwargs)
        return super().unstage()
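
For context, a sketch of how the two caches consumed above might be filled in during acquisition; the method name and the reading layout are assumptions, not code from the snippet's source:

import uuid

def generate_datum(self, key, timestamp, datum_kwargs):
    # Hand out a datum id immediately; the real insert_datum happens in unstage().
    uid = str(uuid.uuid4())
    self._datum_kwargs_map[uid] = datum_kwargs
    reading = {'value': uid, 'timestamp': timestamp}
    self._datum_uids.setdefault(key, []).append(reading)
    return uid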
Example #6
    def _make_data(self):
        N = 15
        filename = self.filename
        data = np.ones((N, 9, 8)) * np.arange(N).reshape(N, 1, 1)
        np.save(filename, data)
        # Insert the data records.
        resource_id = insert_resource('npy_FRAMEWISE', filename + '.npy', {})
        self.datum_ids = [str(uuid.uuid4()) for i in range(N)]
        for i, datum_id in enumerate(self.datum_ids):
            insert_datum(resource_id, datum_id, dict(frame_no=i))
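
A matching handler sketch for the 'npy_FRAMEWISE' spec, under the same assumed handler protocol as above; the class name is illustrative:

import numpy as np

class NpyFrameWiseHandler:
    def __init__(self, filename):
        # filename already includes the '.npy' suffix in the resource above.
        self._data = np.load(filename)

    def __call__(self, frame_no):
        return self._data[frame_no]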
Example #7
    def read(self):
        val = super(AreaDetectorFSIterativeWrite, self).read()

        fs.insert_datum(self._filestore_res, self._last_light_uid[0],
                        {'point_number': self._last_light_uid[1]})
        if self._take_darkfield:
            fs.insert_datum(self._filestore_res, self._last_dark_uid[0],
                            {'point_number': self._last_dark_uid[1]})

        return val
Example #8
def _insert_syn_data(f_type, shape, count):
    fb = insert_resource(f_type, None, {'shape': shape})
    ret = []
    # Cycle through passing the resource as the full document, its raw 'id',
    # and the string form of the 'id'; insert_datum should accept all three.
    res_map_cycle = itertools.cycle((lambda x: x,
                                     lambda x: x['id'],
                                     lambda x: str(x['id'])))
    for k, rmap in zip(range(count), res_map_cycle):
        r_id = str(uuid.uuid4())
        insert_datum(rmap(fb), r_id, {'n': k + 1})
        ret.append(r_id)
    return ret
Example #9
    def _make_data(self):
        filename = self.filename
        with h5py.File(filename, 'a') as f:  # explicit mode; newer h5py defaults to read-only
            N = 5
            # Write the data.
            data = np.multiply.outer(np.arange(N), np.ones((2, 2)))
            f.create_dataset('/entry/data/data', data=data)

        # Insert the data records.
        resource_id = insert_resource(self.spec, filename)
        self.datum_ids = [str(uuid.uuid4()) for i in range(N)]
        for i, datum_id in enumerate(self.datum_ids):
            insert_datum(resource_id, datum_id, dict(point_number=i))
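
A handler sketch consistent with the dataset layout written above ('/entry/data/data' indexed by point_number); the handler protocol and class name are assumptions, as before:

import h5py

class ADHDF5PointHandler:
    def __init__(self, filename):
        self._file = h5py.File(filename, 'r')
        self._dset = self._file['/entry/data/data']

    def __call__(self, point_number):
        return self._dset[point_number]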
Example #10
def get_data(ind_v, ind_h):
    """
    Get data for given x, y index.

    Parameters
    ----------
    ind_v : int
        vertical index
    ind_h : int
        horizontal index

    Returns
    -------
    str
        datum id registered with filestore; it can be used to retrieve the data
    """

    uid = str(uuid.uuid1())

    # generate a 3-D array of random numbers with shape (20, 1, 10)
    syn_data = np.random.randn(20, 1, 10)
    file_path = save_syn_data(uid, syn_data)

    custom = {'dset_path': 'mca_arr'}

    fb = insert_resource('hdf_maps', file_path, resource_kwargs=custom)
    evl = insert_datum(fb, uid, datum_kwargs={'x': ind_v, 'y': ind_h})
    return evl.datum_id
Example #11
    def _make_data(self):
        filename = self.filename
        with h5py.File(filename, 'a') as f:  # explicit mode; newer h5py defaults to read-only
            N = 5
            # Write the data.
            data = np.arange(N, dtype=np.float64)
            f.create_dataset(
                '/entry/instrument/NDAttributes/NDArrayEpicsTSSec',
                data=data)
            f.create_dataset(
                '/entry/instrument/NDAttributes/NDArrayEpicsTSnSec',
                data=data * 1e9)

        # Insert the data records.
        resource_id = insert_resource(self.spec, filename)
        self.datum_ids = [str(uuid.uuid4()) for i in range(N)]
        for i, datum_id in enumerate(self.datum_ids):
            insert_datum(resource_id, datum_id, dict(point_number=i))
Example #12
    def add_data(self, data, uid=None, resource_kwargs=None):
        """
        Parameters
        ----------
        data : ndarray
            The data to save

        uid : str, optional
            The uid to be used for this entry,
            if not given use uuid1 to generate one

        resource_kwargs : None, optional
            Not supported by this writer; passing a truthy value raises
            ValueError.

        Returns
        -------
        uid : str
            The uid used to register this data with filestore, can
            be used to retrieve it
        """
        if not self._writable:
            raise RuntimeError("This writer can only write one data entry "
                               "and has already been used")

        if resource_kwargs:
            raise ValueError("This writer does not support resource_kwargs")

        if op.exists(self._fpath):
            raise IOError("the requested file {fpath} "
                          "already exist".format(fpath=self._fpath))

        if uid is None:
            uid = str(uuid.uuid1())

        np.save(self._fpath, np.asanyarray(data))
        self._writable = False
        fb = fsc.insert_resource(self.SPEC_NAME, self._fpath, self._f_custom)
        evl = fsc.insert_datum(fb, uid, {})

        return evl.datum_id
Example #13
    def deconfigure(self, *args, **kwargs):

        # Flush the (uid, point_number) pairs cached during acquisition.
        for uid, i in self._uid_cache:
            fs.insert_datum(self._filestore_res, str(uid), {'point_number': i})

        super(AreaDetectorFSBulkEntry, self).deconfigure(*args, **kwargs)
Example #14
    def generate_datum(self, key, timestamp):
        uid = super().generate_datum(key, timestamp)
        i = next(self._point_counter)
        fs.insert_datum(self._resource, uid, {'point_number': i})
        return uid
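
A sketch of the bookkeeping generate_datum relies on; the stage() body below is an assumption about how _point_counter could be initialised, not code from the snippet's source:

import itertools

class PointCountingMixin:
    # Illustrative mixin: initialises the counter that generate_datum consumes.
    def stage(self):
        self._point_counter = itertools.count()  # point numbers start at 0
        return super().stage()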