Ejemplo n.º 1
0
def test_hdf5():
    """Round-trip a nested structure through HDF5 and exercise error paths."""
    tempdir = _TempDir()
    test_file = op.join(tempdir, "test.hdf5")
    # Fall back to a dense identity matrix when scipy.sparse is unavailable.
    if sparse is None:
        sp = np.eye(3)
    else:
        sp = sparse.eye(3, 3, format="csc")
    sp[2, 2] = 2
    payload = {
        "a": {"b": np.zeros(3)},
        "c": np.zeros(2, np.complex128),
        "d": [{"e": (1, -2.0, "hello", u"goodbyeu\u2764")}, None],
        "f": sp,
    }
    write_hdf5(test_file, 1)
    assert_equal(read_hdf5(test_file), 1)
    # Re-writing without overwrite=True must fail: the file already exists.
    assert_raises(IOError, write_hdf5, test_file, payload)
    write_hdf5(test_file, payload, overwrite=True)
    # Reading a non-existent file must fail.
    assert_raises(IOError, read_hdf5, test_file + "FOO")
    roundtrip = read_hdf5(test_file)
    assert_true(object_diff(payload, roundtrip) == "")  # no assert_equal, ugly output

    # bad title
    assert_raises(ValueError, read_hdf5, test_file, title="nonexist")
    assert_raises(ValueError, write_hdf5, test_file, payload, overwrite=True, title=1)
    assert_raises(ValueError, read_hdf5, test_file, title=1)
    # unsupported objects
    assert_raises(TypeError, write_hdf5, test_file, {1: "foo"}, overwrite=True)
    assert_raises(TypeError, write_hdf5, test_file, object, overwrite=True)
Ejemplo n.º 2
0
 def set_job_status(self, job_id, status):
     """Persist a new status for one job.

     Args:
         job_id (int): database id of the job to update
         status (str): new status string
     """
     db_entry = self.get_item_by_id(item_id=job_id)
     # Update the in-memory job table row matching this id.
     self._job_table.loc[self._job_table.id == job_id, 'status'] = status
     # Mirror the status into the job's own HDF5 file under '<subjob>/status'.
     h5io.write_hdf5(db_entry["project"] + db_entry["subjob"] + '.h5',
                     status,
                     title=db_entry["subjob"][1:] + '/status',
                     overwrite="update")
Ejemplo n.º 3
0
    def __setitem__(self, key, value):
        """
        Store data inside the HDF5 file.

        Args:
            key (str): key to store the data (appended to the current h5_path)
            value (pandas.DataFrame, pandas.Series, dict, list, float, int):
                basically any kind of data is supported
        """
        use_json = True
        # Use `and` (not bitwise `&`) for boolean logic. Objects that can
        # serialize themselves do so directly, except pandas containers which
        # h5io stores natively.
        if hasattr(value, "to_hdf") and not isinstance(
                value, (pandas.DataFrame, pandas.Series)):
            value.to_hdf(self, key)
            # Without this return the raw object would also be handed to
            # h5io.write_hdf5 below, which cannot store arbitrary objects.
            return
        if (isinstance(value, (list, np.ndarray)) and len(value) > 0
                and isinstance(value[0], (list, np.ndarray))
                and len(value[0]) > 0 and not isinstance(value[0][0], str)):
            shape_lst = [np.shape(sub) for sub in value]
            # Only pack into an object array when all sub-arrays agree in
            # their trailing dimensions; h5io can store that layout compactly.
            if all(shape_lst[0][1:] == t[1:] for t in shape_lst):
                value = np.array([np.array(v) for v in value], dtype=object)
                use_json = False
        elif isinstance(value, tuple):
            # Tuples are stored as lists (HDF5 has no tuple type).
            value = list(value)
        h5io.write_hdf5(
            self.file_name,
            value,
            title=posixpath.join(self.h5_path, key),
            overwrite="update",
            use_json=use_json,
        )
Ejemplo n.º 4
0
    def __setitem__(self, key, value):
        """
        Store data inside the HDF5 file.

        Args:
            key (str): key to store the data (appended to the current h5_path)
            value (pandas.DataFrame, pandas.Series, dict, list, float, int):
                basically any kind of data is supported
        """
        use_json = True
        # Use `and` (not bitwise `&`) for boolean logic. Objects that can
        # serialize themselves do so directly, except pandas containers which
        # h5io stores natively.
        if hasattr(value, "to_hdf") and not isinstance(
                value, (pandas.DataFrame, pandas.Series)):
            value.to_hdf(self, key)
            # Without this return the raw object would also be handed to
            # h5io.write_hdf5 below, which cannot store arbitrary objects.
            return
        if (isinstance(value, (list, np.ndarray)) and len(value) > 0
                and isinstance(value[0], (list, np.ndarray))
                and len(value[0]) > 0 and not isinstance(value[0][0], str)
                and _is_ragged_in_1st_dim_only(value)):
            # if the sub-arrays in value all share shape[1:], h5io comes up with a more efficient storage format than
            # just writing a dataset for each element, by concatenating along the first axis and storing the indices
            # where to break the concatenated array again
            value = np.array([np.asarray(v) for v in value], dtype=object)
            use_json = False
        elif isinstance(value, tuple):
            # Tuples are stored as lists (HDF5 has no tuple type).
            value = list(value)
        h5io.write_hdf5(
            self.file_name,
            value,
            title=posixpath.join(self.h5_path, key),
            overwrite="update",
            use_json=use_json,
        )
Ejemplo n.º 5
0
def test_numpy_values():
    """Round-trip scalar NumPy values of several dtypes."""
    tempdir = _TempDir()
    test_file = op.join(tempdir, 'test.hdf5')
    casts = (np.int8, np.int16, np.int32, np.int64, np.bool_,
             np.float16, np.float32, np.float64)
    for cast in casts:
        scalar = cast(1)
        write_hdf5(test_file, scalar, title='first', overwrite='update')
        assert_equal(read_hdf5(test_file, 'first'), scalar)
Ejemplo n.º 6
0
def test_path_support():
    """Check that nested titles ('a/b') address groups inside one file."""
    tempdir = _TempDir()
    test_file = op.join(tempdir, 'test.hdf5')
    write_hdf5(test_file, 1, title='first')
    write_hdf5(test_file, 2, title='second/third', overwrite='update')
    # 'second' is only an intermediate group, not a titled node.
    assert_raises(ValueError, read_hdf5, test_file, title='second')
    assert_equal(read_hdf5(test_file, 'first'), 1)
    assert_equal(read_hdf5(test_file, 'second/third'), 2)
Ejemplo n.º 7
0
def test_path_support(tmpdir):
    """Check that nested titles ('a/b') address groups inside one file."""
    test_file = op.join(str(tmpdir), 'test.hdf5')
    write_hdf5(test_file, 1, title='first')
    write_hdf5(test_file, 2, title='second/third', overwrite='update')
    # 'second' is only an intermediate group, not a titled node.
    pytest.raises(ValueError, read_hdf5, test_file, title='second')
    assert_equal(read_hdf5(test_file, 'first'), 1)
    assert_equal(read_hdf5(test_file, 'second/third'), 2)
Ejemplo n.º 8
0
 def save(self, fname, overwrite=False):
     """Save this object to an HDF5 file.

     Parameters
     ----------
     fname : str | pathlib.Path
         Target HDF5 file name.
     overwrite : bool
         Passed through to write_hdf5.
     """
     # Normalize to a Path so the helpers below receive a single type.
     if not isinstance(fname, Path):
         fname = Path(fname)
     self._save_info(fname, overwrite=overwrite)
     # 'ch_info_' and 'data_norm_' are excluded from this payload;
     # presumably handled elsewhere (e.g. _save_info) — TODO confirm.
     save_vars = self._get_save_vars(exclude=['ch_info_', 'data_norm_'])
     write_hdf5(fname,
                save_vars,
                title=_get_title(self.__class__, self.comment),
                overwrite=overwrite,
                slash='replace')
Ejemplo n.º 9
0
def test_numpy_values(tmpdir):
    """Test NumPy values."""
    test_file = op.join(str(tmpdir), 'test.hdf5')
    dtypes = (np.int8, np.int16, np.int32, np.int64, np.bool_, np.float16,
              np.float32, np.float64)
    for cast in dtypes:
        expected = cast(1)
        write_hdf5(test_file, expected, title='first', overwrite='update')
        assert_equal(read_hdf5(test_file, 'first'), expected)
Ejemplo n.º 10
0
def test_timezone(name, tmpdir):
    """Test datetime.timezone support."""
    fname = op.join(str(tmpdir), 'test.hdf5')
    # Only pass the optional name when the parametrized value provides one.
    kwargs = {} if name is None else {'name': name}
    x = datetime.timezone(datetime.timedelta(hours=-7), **kwargs)
    write_hdf5(fname, x)
    y = read_hdf5(fname)
    assert isinstance(y, datetime.timezone)
    assert y == x
    if name is not None:
        assert y.tzname(None) == name
Ejemplo n.º 11
0
def write_hdf5_mne_epochs(fname, epochs, overwrite=False):
    """Serialize an epochs object's attributes to HDF5 under 'nice/data/epochs'."""
    epochs_vars = {}
    for attr, val in vars(epochs).items():
        # Keep public attributes; of the private ones only '_data' is stored.
        if not attr.startswith('_') or attr == '_data':
            epochs_vars[attr] = val
    # tmin/tmax are properties, not instance attributes, so add them explicitly.
    epochs_vars['tmin'] = epochs.tmin
    epochs_vars['tmax'] = epochs.tmax
    # The measurement info must be converted to a plain dict for h5io.
    epochs_vars['info'] = info_to_dict(epochs_vars['info'])
    write_hdf5(fname, epochs_vars, overwrite=overwrite,
               title='nice/data/epochs', slash='replace')
Ejemplo n.º 12
0
 def save(self, fname, overwrite=False):
     """Save the marker collection (key order plus each marker) to HDF5.

     Parameters
     ----------
     fname : str | pathlib.Path
         Target file name; should end with '-markers.hdf5'.
     overwrite : bool
         Whether an existing order entry may be overwritten.
     """
     if isinstance(fname, Path):
         fname = fname.as_posix()
     # Warn (but do not fail) on unconventional file names.
     if not fname.endswith('-markers.hdf5'):
         logger.warning('Feature collections file name should end '
                        'with "-markers.hdf5". Some NICE markers '
                        'might not work.')
     # Store the key order first so it can be restored on load.
     write_hdf5(fname,
                list(self.keys()),
                title='nice/markers/order',
                overwrite=overwrite,
                slash='replace')
     # Each contained marker appends itself to the same file.
     for meas in self.values():
         meas.save(fname, overwrite='update')
Ejemplo n.º 13
0
def test_h5_file_object(tmpdir):
    """Write and read through an already-open h5py.File handle."""
    test_file_path = op.join(str(tmpdir), 'test1.hdf5')
    # A fname that is neither a string nor a File object is rejected.
    pytest.raises(ValueError, write_hdf5, fname=33, data=1)
    data = {'a': 42}
    # Reading/writing should work the same through an open handle.
    with h5py.File(test_file_path, 'a') as test_file_obj:
        write_hdf5(test_file_obj, data)
        assert_equal(read_hdf5(test_file_obj), data)
    # A read-only handle cannot be written to.
    with h5py.File(test_file_path, 'r') as test_file_obj:
        assert test_file_obj.mode == 'r'
        with pytest.raises(UnsupportedOperation):
            write_hdf5(test_file_obj, data=1)
Ejemplo n.º 14
0
def test_multi_dim_array(tmpdir):
    """Test multidimensional arrays.

    Round-trips a ragged collection of 2-D arrays and checks that arrays
    whose trailing dimensions disagree are rejected.
    """
    rng = np.random.RandomState(0)
    # dtype=object is required on NumPy >= 1.24, where implicit creation of
    # ragged arrays raises a ValueError instead of building an object array.
    traj = np.array([rng.randn(2, 1), rng.randn(3, 1)], dtype=object)
    test_file = op.join(str(tmpdir), 'test.hdf5')
    write_hdf5(test_file, traj, title='first', overwrite='update')
    for traj_read, traj_sub in zip(read_hdf5(test_file, 'first'), traj):
        assert (np.equal(traj_read, traj_sub).all())
    # Sub-arrays that disagree beyond the first dimension cannot be stored.
    traj_no_structure = np.array([rng.randn(2, 1, 1), rng.randn(3, 1, 2)],
                                 dtype=object)
    pytest.raises(ValueError,
                  write_hdf5,
                  test_file,
                  traj_no_structure,
                  title='second',
                  overwrite='update')
Ejemplo n.º 15
0
    def save(self, fname, overwrite=False):
        """MANDATORY.

        Save method should be overriden to use the
        custom title param.

        Parameters
        ----------
        fname : str | pathlib.Path
            Target HDF5 file name.
        overwrite : bool
            Passed through to write_hdf5.
        """
        # Normalize to a Path so the helpers below receive a single type.
        if not isinstance(fname, Path):
            fname = Path(fname)
        self._save_info(fname, overwrite=overwrite)
        # 'ch_info_' is excluded from this payload; presumably handled by
        # _save_info — TODO confirm.
        save_vars = self._get_save_vars(exclude=['ch_info_'])
        write_hdf5(
            fname,
            save_vars,
            title=_get_title(self.__class__, self.comment),
            overwrite=overwrite, slash='replace')
Ejemplo n.º 16
0
    def __setitem__(self, key, value):
        """
        Store data inside the HDF5 file.

        Args:
            key (str): key to store the data (appended to the current h5_path)
            value (pandas.DataFrame, pandas.Series, dict, list, float, int):
                basically any kind of data is supported
        """
        # Use `and` (not bitwise `&`) for boolean logic. Objects that can
        # serialize themselves do so, except pandas containers which h5io
        # stores natively.
        if hasattr(value, "to_hdf") and not isinstance(
                value, (pandas.DataFrame, pandas.Series)):
            value.to_hdf(self, key)
        else:
            h5io.write_hdf5(self.file_name,
                            value,
                            title=posixpath.join(self.h5_path, key),
                            overwrite="update")
Ejemplo n.º 17
0
    def save(self, fname, overwrite=False):
        """Save GLM results to disk.

        Parameters
        ----------
        fname : str
            The filename to use to write the HDF5 data.
            Should end in ``'glm.h5'``.
        %(overwrite)s
        """
        _validate_type(fname, 'path-like', 'fname')
        # NOTE(review): only PosixPath is converted to str here; a
        # WindowsPath would reach endswith() below and fail — confirm
        # whether isinstance(fname, Path) was intended.
        if isinstance(fname, PosixPath):
            fname = str(fname)
        # Enforce the documented naming convention.
        if not fname.endswith('glm.h5'):
            raise IOError('The filename must end with glm.h5, '
                          f'instead received {fname}')
        # The object's state dict is stored under the 'mnepython' title.
        write_hdf5(fname, self._get_state(),
                   overwrite=overwrite, title='mnepython')
Ejemplo n.º 18
0
    def write_hdf(self, fname, **kwargs):
        """
        Write the object to an HDF file.

        All relevant object attributes are bundled into one dictionary and
        handed to h5io, which persists it via the h5py library.

        Parameters
        ----------
        fname : str
            The file name of the hdf5 file.
        **kwargs : dict
            Arguments are passed to write_hdf5
        """
        print('Writing TFR data to {0}'.format(fname))
        attrs = ('data', 'freqs', 'events', 'times', 'info', 'event_id')
        write_dict = {name: getattr(self, name) for name in attrs}
        h5io.write_hdf5(fname, write_dict, **kwargs)
Ejemplo n.º 19
0
    def save(self, fname, overwrite=False):
        """Save this object (and its PSD estimator, once) to an HDF5 file.

        Parameters
        ----------
        fname : str | pathlib.Path
            Target HDF5 file name.
        overwrite : bool
            Passed through to write_hdf5 and the estimator's save.
        """
        if not isinstance(fname, Path):
            fname = Path(fname)
        self._save_info(fname, overwrite=overwrite)
        # The estimator and raw data are excluded from this payload; the
        # estimator is written separately below.
        save_vars = self._get_save_vars(
            exclude=['ch_info_', 'estimator', 'data_'])

        # Write the estimator only once per file: probe the file for a group
        # with the estimator's title before saving it.
        has_estimator = False
        estimator_name = self.estimator._get_title()
        with h5py.File(fname, 'r') as h5fid:
            if estimator_name in h5fid:
                has_estimator = True
                logger.info('PSDS Estimator already present in HDF5 file, '
                            'will not be overwritten')

        if not has_estimator:
            logger.info('Writing PSDS Estimator to HDF5 file')
            self.estimator.save(fname, overwrite=overwrite)
        write_hdf5(fname,
                   save_vars,
                   overwrite=overwrite,
                   title=_get_title(self.__class__, self.comment),
                   slash='replace')
Ejemplo n.º 20
0
    def set_job_status(self, job_specifier, status, project=None):
        """
        Set the status of a particular job

        Args:
            job_specifier (str): name of the job or job ID
            status (str): job status can be one of the following ['initialized', 'appended', 'created', 'submitted',
                         'running', 'aborted', 'collect', 'suspended', 'refresh', 'busy', 'finished']
            project (str): project path; defaults to this object's current
                           project when None

        """
        if project is None:
            project = self._project
        # Resolve the specifier (name or id) to a concrete job id.
        job_id = self.get_job_id(project=project, job_specifier=job_specifier)
        # Update the in-memory job table row matching this id.
        self._job_table.loc[self._job_table.id == job_id, 'status'] = status
        db_entry = self.get_item_by_id(item_id=job_id)
        # Mirror the status into the job's HDF5 file under '<subjob>/status'.
        h5io.write_hdf5(db_entry["project"] + db_entry["subjob"] + '.h5',
                        status,
                        title=db_entry["subjob"][1:] + '/status',
                        overwrite="update")
Ejemplo n.º 21
0
    def write_hdf(self, fname, **kwargs):
        """
        Write the object to an HDF file.

        All relevant object attributes are bundled into one dictionary and
        handed to h5io, which persists it via the h5py library.

        Parameters
        ----------
        fname : str
            The file name of the hdf5 file.
        **kwargs : dict
            Arguments are passed to write_hdf5
        """
        print('Writing TFR data to {0}'.format(fname))
        write_dict = dict(data=self.data,
                          freqs=self.freqs,
                          events=self.events,
                          times=self.times,
                          info=self.info,
                          event_id=self.event_id)
        h5io.write_hdf5(fname, write_dict, **kwargs)
Ejemplo n.º 22
0
 def to_hdf(self):
     """
     Store input, output and the class definition in an HDF5 file - to maintain orthogonal persistence.
     """
     # Lazily capture the source code of the user-supplied callables so the
     # job can be reconstructed from the file alone.
     if self._write_input_source is None:
         self._write_input_source = self._obj_to_str(self.write_input)
     if self._collect_output_source is None:
         self._collect_output_source = self._obj_to_str(self.collect_output)
     job_dict = {
         "input": self._input_dict,
         "settings": {
             "executable": self.executable,
             "working_directory": os.path.abspath(self._working_directory),
             "write_input": self._write_input_source,
             "collect_output": self._collect_output_source,
         },
     }
     # Output only exists after the job has actually run.
     if len(self.output_dict) != 0:
         job_dict["output"] = self.output_dict
     h5io.write_hdf5(
         os.path.join(self._working_directory, "scisweeper.h5"),
         job_dict,
         overwrite="update",
     )
Ejemplo n.º 23
0
def test_hdf5_use_json():
    """Test HDF5 IO with use_json=True, including slash handling in keys.
    """
    tempdir = _TempDir()
    test_file = op.join(tempdir, 'test.hdf5')
    # Nested keys containing '/' must be rejected when slash='error'.
    splash_dict = {'first/second': {'one/more': 'value'}}
    pytest.raises(ValueError,
                  write_hdf5,
                  test_file,
                  splash_dict,
                  overwrite=True,
                  slash='error',
                  use_json=True)
    # slash='replace' encodes '/' in keys and decodes it on read.
    spec_dict = {'first/second': 'third'}
    write_hdf5(test_file,
               spec_dict,
               overwrite=True,
               slash='replace',
               use_json=True)
    assert_equal(
        read_hdf5(test_file, slash='replace').keys(), spec_dict.keys())
    # With slash='ignore' the encoded placeholder is left in the key.
    in_keys = list(read_hdf5(test_file, slash='ignore').keys())
    assert ('{FWDSLASH}' in in_keys[0])
    comp_dict = {'first': [1, 2], 'second': 'str', 'third': {'a': 1}}
    write_hdf5(test_file, comp_dict, overwrite=True, use_json=True)
    assert_equal(sorted(read_hdf5(test_file, slash='replace').keys()),
                 sorted(comp_dict.keys()))
    # NumPy values survive the JSON path as well.
    numpy_dict = {'first': np.array([1])}
    write_hdf5(test_file, numpy_dict, overwrite=True, use_json=True)
    assert_equal(
        list(read_hdf5(test_file, slash='replace').values())[0],
        list(numpy_dict.values())[0])
    pytest.raises(ValueError, read_hdf5, test_file, slash='brains')
    # Testing that title slashes aren't replaced
    write_hdf5(test_file,
               spec_dict,
               title='one/two',
               overwrite=True,
               slash='replace',
               use_json=True)
    assert_equal(
        read_hdf5(test_file, title='one/two', slash='replace').keys(),
        spec_dict.keys())
Ejemplo n.º 24
0
def test_datetime(tmpdir):
    """Test datetime.datetime support (naive, UTC-aware and custom tzinfo)."""
    fname = op.join(str(tmpdir), 'test.hdf5')
    # Naive. Use an explicit field mapping: the original
    # ``y, m, d, h, m, s, mu = range(1, 8)`` unpacking reused ``m`` for both
    # month and minute, so month silently became 5 instead of 2.
    fields = dict(year=1, month=2, day=3, hour=4, minute=5, second=6,
                  microsecond=7)
    dt = datetime.datetime(**fields)
    for key, val in fields.items():
        assert getattr(dt, key) == val
    write_hdf5(fname, dt)
    dt2 = read_hdf5(fname)
    assert isinstance(dt2, datetime.datetime)
    assert dt == dt2
    assert dt2.tzinfo is None
    # Aware (UTC)
    dt = dt.replace(tzinfo=datetime.timezone.utc)
    write_hdf5(fname, dt, overwrite=True)
    dt2 = read_hdf5(fname)
    assert isinstance(dt2, datetime.datetime)
    assert dt == dt2
    assert dt2.tzinfo is datetime.timezone.utc
    # Custom tzinfo subclass: utcoffset/tzname/dst must all round-trip.
    dt = dt.replace(tzinfo=XT())
    write_hdf5(fname, dt, overwrite=True)
    dt2 = read_hdf5(fname)
    assert isinstance(dt2, datetime.datetime)
    assert dt == dt2
    assert dt2.tzinfo is not None
    assert dt2.tzinfo is not datetime.timezone.utc
    for key in ('utcoffset', 'tzname', 'dst'):
        v1 = getattr(dt2.tzinfo, key)(None)
        v2 = getattr(dt.tzinfo, key)(None)
        assert v1 == v2
Ejemplo n.º 25
0
###############################################################################
# Load data
# ---------

X, y = [list() for _ in range(len(ages))], list()
for ai, age in enumerate(ages):
    shape = None
    for mi, measure in enumerate(measures):
        # Cache a "fast" copy holding only the measure's data array, so later
        # runs skip parsing the full source HDF5 file.
        fast_fname = 'genz_%s_%s_fast.h5' % (age, measure)
        if not op.isfile(fast_fname):
            print('Converting %s measure %s' % (age, measure))
            data = read_hdf5('genz_%s_%s.h5' % (age, measure))
            data = data['data_vars'][measure]['data']
            data = np.array(data)
            # NOTE(review): np.float was removed in NumPy 1.24 — this line
            # needs `float` (or np.float64) on modern NumPy.
            assert data.dtype == np.float
            write_hdf5(fast_fname, data)
        data = read_hdf5(fast_fname)
        # All measures for one age must share the same 4-D shape.
        if shape is None:
            shape = data.shape
            assert shape[-1] == 2
        assert data.shape == shape
        assert data.ndim == 4
        #data = data[freq_idx]  # only use these freqs
        # deal with reordering (undo it to restore original order)
        order = np.argsort(data[:, :, :, 0], axis=-1)
        data = data[..., 1]
        for ii in range(data.shape[0]):
            for jj in range(data.shape[1]):
                data[ii, jj] = data[ii, jj, order[ii, jj]]
        # put in subject, freq, roi order
        data = data.transpose(1, 0, 2)
Ejemplo n.º 26
0
        # to keep the run from which the event was found
        events_meg_[:, 1] = run_number
        events_meg.append(events_meg_)
        run_number = run_number + 1
    events_meg = np.vstack(events_meg)  # concatenate all meg events

    # Compare MEG and bhv triggers and save events_behavior for each event
    event_types = ['Target', 'Cue', 'Probe']
    events_behavior_type = []
    for event_type in event_types:
        print(event_type)
        # NOTE(review): events_behavior_type is overwritten on every pass, so
        # after this loop it only holds the last type ('Probe') — confirm
        # this is intended for the epoching below.
        events_behavior_type = fix_triggers(events_meg, events_behavior,
                                            event_type='trigg' + event_type)
        fname = op.join(path_data,  subject, 'behavior_%s.hdf5' % event_type)
        write_hdf5(fname, events_behavior_type)

    # Epoch raw data (Target)

        epochs_list = list()
        run_number = 1
        for run in runs:
            fname_raw = op.join(path_data, subject, run)
            print(fname_raw)
            raw = read_raw_ctf(fname_raw, preload=True, system_clock='ignore')
            # Band-pass 0.75-30 Hz before epoching.
            raw.filter(.75, h_freq=30.0)
            sel = events_behavior_type['meg_file'] == float(run_number)
            time_sample = events_behavior_type['meg_event_tsample'][sel]
            trigger_value = events_behavior_type['meg_event_value'][sel]
            events_meg_run = np.vstack((time_sample.astype(int),
                                        np.zeros_like(time_sample, int),
Ejemplo n.º 27
0
def test_hdf5(tmpdir):
    """Test HDF5 IO."""
    tempdir = str(tmpdir)
    test_file = op.join(tempdir, 'test.hdf5')
    # Fall back to NumPy stand-ins when scipy.sparse / pandas are unavailable.
    sp = np.eye(3) if sparse is None else sparse.eye(3, 3, format='csc')
    sp_csr = np.eye(3) if sparse is None else sparse.eye(3, 3, format='csr')
    df = np.eye(3) if isinstance(DataFrame, type(None)) else DataFrame(
        np.eye(3))
    sr = np.eye(3) if isinstance(Series, type(None)) else Series(
        np.random.randn(3))
    sp[2, 2] = 2
    sp_csr[2, 2] = 2
    # Nested payload covering dicts, arrays, complex values, tuples, unicode,
    # sparse matrices (CSC and CSR), DataFrame/Series and plain strings.
    x = dict(a=dict(b=np.zeros(3)),
             c=np.zeros(2, np.complex128),
             d=[dict(e=(1, -2., 'hello', u'goodbyeu\u2764')), None],
             f=sp,
             g=dict(dfa=df, srb=sr),
             h=sp_csr,
             i=sr,
             j='hi')
    write_hdf5(test_file, 1)
    assert_equal(read_hdf5(test_file), 1)
    pytest.raises(IOError, write_hdf5, test_file, x)  # file exists
    write_hdf5(test_file, x, overwrite=True)
    pytest.raises(IOError, read_hdf5, test_file + 'FOO')  # not found
    xx = read_hdf5(test_file)
    assert (object_diff(x, xx) == '')  # no assert_equal, ugly output
    list_file_contents(test_file)  # Testing the h5 listing
    pytest.raises(TypeError, list_file_contents, sp)  # Only string works
    write_hdf5(test_file, np.bool_(True), overwrite=True)
    assert_equal(read_hdf5(test_file), np.bool_(True))

    # bad title
    pytest.raises(ValueError, read_hdf5, test_file, title='nonexist')
    pytest.raises(ValueError,
                  write_hdf5,
                  test_file,
                  x,
                  overwrite=True,
                  title=1)
    pytest.raises(ValueError, read_hdf5, test_file, title=1)
    # unsupported objects
    pytest.raises(TypeError, write_hdf5, test_file, {1: 'foo'}, overwrite=True)
    pytest.raises(TypeError, write_hdf5, test_file, object, overwrite=True)
    # special_chars
    spec_dict = {'first/second': 'third'}
    pytest.raises(ValueError, write_hdf5, test_file, spec_dict, overwrite=True)
    pytest.raises(ValueError,
                  write_hdf5,
                  test_file,
                  spec_dict,
                  overwrite=True,
                  slash='brains')
    write_hdf5(test_file, spec_dict, overwrite=True, slash='replace')
    assert_equal(
        read_hdf5(test_file, slash='replace').keys(), spec_dict.keys())
    in_keys = list(read_hdf5(test_file, slash='ignore').keys())
    assert ('{FWDSLASH}' in in_keys[0])
    pytest.raises(ValueError, read_hdf5, test_file, slash='brains')
    # Testing that title slashes aren't replaced
    write_hdf5(test_file,
               spec_dict,
               title='one/two',
               overwrite=True,
               slash='replace')
    assert_equal(
        read_hdf5(test_file, title='one/two', slash='replace').keys(),
        spec_dict.keys())

    # Multiple titles in one file; overwrite='update' replaces one title only.
    write_hdf5(test_file, 1, title='first', overwrite=True)
    write_hdf5(test_file, 2, title='second', overwrite='update')
    assert_equal(read_hdf5(test_file, title='first'), 1)
    assert_equal(read_hdf5(test_file, title='second'), 2)
    pytest.raises(IOError, write_hdf5, test_file, 3, title='second')
    write_hdf5(test_file, 3, title='second', overwrite='update')
    assert_equal(read_hdf5(test_file, title='second'), 3)

    write_hdf5(test_file, 5, title='second', overwrite='update', compression=5)
    assert_equal(read_hdf5(test_file, title='second'), 5)
Ejemplo n.º 28
0
Archivo: test_io.py Proyecto: h5io/h5io
def test_hdf5():
    """Test HDF5 IO
    """
    tempdir = _TempDir()
    test_file = op.join(tempdir, 'test.hdf5')
    # Fall back to NumPy stand-ins when scipy.sparse / pandas are unavailable.
    sp = np.eye(3) if sparse is None else sparse.eye(3, 3, format='csc')
    sp_csr = np.eye(3) if sparse is None else sparse.eye(3, 3, format='csr')
    df = np.eye(3) if isinstance(DataFrame, type(None)) else DataFrame(
        np.eye(3))
    sr = np.eye(3) if isinstance(Series, type(None)) else Series(
        np.random.randn(3))
    sp[2, 2] = 2
    sp_csr[2, 2] = 2
    # Nested payload covering dicts, arrays, complex values, tuples, unicode,
    # sparse matrices (CSC and CSR), DataFrame/Series and plain strings.
    x = dict(a=dict(b=np.zeros(3)), c=np.zeros(2, np.complex128),
             d=[dict(e=(1, -2., 'hello', u'goodbyeu\u2764')), None], f=sp,
             g=dict(dfa=df, srb=sr), h=sp_csr, i=sr, j='hi')
    write_hdf5(test_file, 1)
    assert_equal(read_hdf5(test_file), 1)
    assert_raises(IOError, write_hdf5, test_file, x)  # file exists
    write_hdf5(test_file, x, overwrite=True)
    assert_raises(IOError, read_hdf5, test_file + 'FOO')  # not found
    xx = read_hdf5(test_file)
    assert_true(object_diff(x, xx) == '')  # no assert_equal, ugly output
    list_file_contents(test_file)  # Testing the h5 listing
    assert_raises(TypeError, list_file_contents, sp)  # Only string works
    write_hdf5(test_file, np.bool_(True), overwrite=True)
    assert_equal(read_hdf5(test_file), np.bool_(True))

    # bad title
    assert_raises(ValueError, read_hdf5, test_file, title='nonexist')
    assert_raises(ValueError, write_hdf5, test_file, x, overwrite=True,
                  title=1)
    assert_raises(ValueError, read_hdf5, test_file, title=1)
    # unsupported objects
    assert_raises(TypeError, write_hdf5, test_file, {1: 'foo'},
                  overwrite=True)
    assert_raises(TypeError, write_hdf5, test_file, object, overwrite=True)
    # special_chars
    spec_dict = {'first/second': 'third'}
    assert_raises(ValueError, write_hdf5, test_file, spec_dict, overwrite=True)
    assert_raises(ValueError, write_hdf5, test_file, spec_dict, overwrite=True,
                  slash='brains')
    write_hdf5(test_file, spec_dict, overwrite=True, slash='replace')
    assert_equal(
        read_hdf5(test_file, slash='replace').keys(), spec_dict.keys())
    in_keys = list(read_hdf5(test_file, slash='ignore').keys())
    assert_true('{FWDSLASH}' in in_keys[0])
    assert_raises(ValueError, read_hdf5, test_file, slash='brains')
    # Testing that title slashes aren't replaced
    write_hdf5(
        test_file, spec_dict, title='one/two', overwrite=True, slash='replace')
    assert_equal(read_hdf5(test_file, title='one/two', slash='replace').keys(),
                 spec_dict.keys())

    # Multiple titles in one file; overwrite='update' replaces one title only.
    write_hdf5(test_file, 1, title='first', overwrite=True)
    write_hdf5(test_file, 2, title='second', overwrite='update')
    assert_equal(read_hdf5(test_file, title='first'), 1)
    assert_equal(read_hdf5(test_file, title='second'), 2)
    assert_raises(IOError, write_hdf5, test_file, 3, title='second')
    write_hdf5(test_file, 3, title='second', overwrite='update')
    assert_equal(read_hdf5(test_file, title='second'), 3)

    write_hdf5(test_file, 5, title='second', overwrite='update', compression=5)
    assert_equal(read_hdf5(test_file, title='second'), 5)
Ejemplo n.º 29
0
def test_hdf5():
    """Test HDF5 IO
    """
    tempdir = _TempDir()
    test_file = op.join(tempdir, 'test.hdf5')
    # Fall back to NumPy stand-ins when scipy.sparse / pandas are unavailable.
    sp = np.eye(3) if sparse is None else sparse.eye(3, 3, format='csc')
    df = np.eye(3) if isinstance(DataFrame, type(None)) else DataFrame(
        np.eye(3))
    sr = np.eye(3) if isinstance(Series, type(None)) else Series(
        np.random.randn(3))
    sp[2, 2] = 2
    # Nested payload covering dicts, arrays, complex values, tuples, unicode,
    # a sparse matrix and DataFrame/Series.
    x = dict(a=dict(b=np.zeros(3)), c=np.zeros(2, np.complex128),
             d=[dict(e=(1, -2., 'hello', u'goodbyeu\u2764')), None], f=sp,
             g=dict(dfa=df, srb=sr))
    write_hdf5(test_file, 1)
    assert_equal(read_hdf5(test_file), 1)
    assert_raises(IOError, write_hdf5, test_file, x)  # file exists
    write_hdf5(test_file, x, overwrite=True)
    assert_raises(IOError, read_hdf5, test_file + 'FOO')  # not found
    xx = read_hdf5(test_file)
    assert_true(object_diff(x, xx) == '')  # no assert_equal, ugly output

    # bad title
    assert_raises(ValueError, read_hdf5, test_file, title='nonexist')
    assert_raises(ValueError, write_hdf5, test_file, x, overwrite=True,
                  title=1)
    assert_raises(ValueError, read_hdf5, test_file, title=1)
    # unsupported objects
    assert_raises(TypeError, write_hdf5, test_file, {1: 'foo'},
                  overwrite=True)
    assert_raises(TypeError, write_hdf5, test_file, object, overwrite=True)

    # Multiple titles in one file; overwrite='update' replaces one title only.
    write_hdf5(test_file, 1, title='first', overwrite=True)
    write_hdf5(test_file, 2, title='second', overwrite='update')
    assert_equal(read_hdf5(test_file, title='first'), 1)
    assert_equal(read_hdf5(test_file, title='second'), 2)
    assert_raises(IOError, write_hdf5, test_file, 3, title='second')
    write_hdf5(test_file, 3, title='second', overwrite='update')
    assert_equal(read_hdf5(test_file, title='second'), 3)

    write_hdf5(test_file, 5, title='second', overwrite='update', compression=5)
    assert_equal(read_hdf5(test_file, title='second'), 5)
Ejemplo n.º 30
0
                # Apply baseline of Target
                bsl_channels = pick_types(epochs.info, meg=True)
                bsl_data = epochs_baseline.get_data()[:, bsl_channels, :]
                bsl_data = np.mean(bsl_data, axis=2)
                epochs._data[:, bsl_channels, :] -= bsl_data[:, :, np.newaxis]
            else:
                # Apply baseline from beginning of epoch to t0
                epochs.apply_baseline((-0.2, 0.))
            epochs_list.append(epochs)
        epochs = concatenate_epochs(epochs_list)
        # Save epochs and hdf5 behavior
        # (session suffix distinguishes first and second recording sessions)
        suffix = '' if target_baseline else '_bsl'
        session = '_2' if subject[-1:] == '2' else '_1'
        fname = op.join(path_data, subject,
                        'behavior_%s%s.hdf5' % (event_type, session))
        write_hdf5(fname, events_behavior_type, overwrite=True)
        fname = op.join(path_data, subject,
                        'epochs_%s%s%s.fif' % (event_type, suffix, session))
        epochs.save(fname)

# concatenate the two sessions when 2nd one is existing
subject = sys.argv[1]
suffix = '' if target_baseline else '_bsl'
for event_type in event_types:
    subject_2 = subject + '_2'
    if os.path.exists(op.join(path_data, subject_2)):
        epochs_list = list()
        fname1 = op.join(path_data, subject,
                         'epochs_%s%s_1.fif' % (event_type, suffix))
        epochs = mne.read_epochs(fname1)
        # Copy dev_head_t of the first session to the second session
Ejemplo n.º 31
0
def test_hdf5():
    """Test HDF5 IO
    """
    tempdir = _TempDir()
    test_file = op.join(tempdir, 'test.hdf5')
    # Fall back to NumPy stand-ins when scipy.sparse / pandas are unavailable.
    sp = np.eye(3) if sparse is None else sparse.eye(3, 3, format='csc')
    df = np.eye(3) if isinstance(DataFrame, type(None)) else DataFrame(
        np.eye(3))
    sr = np.eye(3) if isinstance(Series, type(None)) else Series(
        np.random.randn(3))
    sp[2, 2] = 2
    # Nested payload covering dicts, arrays, complex values, tuples, unicode,
    # a sparse matrix and DataFrame/Series.
    x = dict(a=dict(b=np.zeros(3)),
             c=np.zeros(2, np.complex128),
             d=[dict(e=(1, -2., 'hello', u'goodbyeu\u2764')), None],
             f=sp,
             g=dict(dfa=df, srb=sr))
    write_hdf5(test_file, 1)
    assert_equal(read_hdf5(test_file), 1)
    assert_raises(IOError, write_hdf5, test_file, x)  # file exists
    write_hdf5(test_file, x, overwrite=True)
    assert_raises(IOError, read_hdf5, test_file + 'FOO')  # not found
    xx = read_hdf5(test_file)
    assert_true(object_diff(x, xx) == '')  # no assert_equal, ugly output

    # bad title
    assert_raises(ValueError, read_hdf5, test_file, title='nonexist')
    assert_raises(ValueError,
                  write_hdf5,
                  test_file,
                  x,
                  overwrite=True,
                  title=1)
    assert_raises(ValueError, read_hdf5, test_file, title=1)
    # unsupported objects
    assert_raises(TypeError, write_hdf5, test_file, {1: 'foo'}, overwrite=True)
    assert_raises(TypeError, write_hdf5, test_file, object, overwrite=True)

    # Multiple titles in one file; overwrite='update' replaces one title only.
    write_hdf5(test_file, 1, title='first', overwrite=True)
    write_hdf5(test_file, 2, title='second', overwrite='update')
    assert_equal(read_hdf5(test_file, title='first'), 1)
    assert_equal(read_hdf5(test_file, title='second'), 2)
    assert_raises(IOError, write_hdf5, test_file, 3, title='second')
    write_hdf5(test_file, 3, title='second', overwrite='update')
    assert_equal(read_hdf5(test_file, title='second'), 3)

    write_hdf5(test_file, 5, title='second', overwrite='update', compression=5)
    assert_equal(read_hdf5(test_file, title='second'), 5)