Example #1
 def add_scratch(self, **kwargs):
     '''Add data to the scratch space'''
     data, name, notes = getargs('data', 'name', 'notes', kwargs)
     if isinstance(data, (np.ndarray, pd.DataFrame, list, tuple)):
         if name is None:
             raise ValueError('please provide a name for scratch data')
         if isinstance(data, pd.DataFrame):
             table_description = getargs('table_description', kwargs)
             data = DynamicTable.from_dataframe(
                 df=data, name=name, table_description=table_description)
             if notes is not None:
                 warn(
                     'Notes argument is ignored when adding a pandas DataFrame to scratch'
                 )
         else:
             data = ScratchData(name=name, data=data, notes=notes)
     else:
         if notes is not None:
             warn(
                 'Notes argument is ignored when adding an NWBContainer to scratch'
             )
         if name is not None:
             warn(
                 'Name argument is ignored when adding an NWBContainer to scratch'
             )
     self._add_scratch(data)
Example #2
 def __init__(self, **kwargs):
     table, region = getargs('table', 'region', kwargs)
     self.__table = table
     self.__region = region
     name = getargs('name', kwargs)
     super(NWBTableRegion, self).__init__(name, table)
     self.__regionslicer = get_region_slicer(self.__table.data, self.__region)
Example #3
 def __init__(self, **kwargs):
     if kwargs.get('description', None) is None:
         kwargs['description'] = "data on spiking units"
     call_docval_func(super(Units, self).__init__, kwargs)
     if 'spike_times' not in self.colnames:
         self.__has_spike_times = False
     self.__electrode_table = getargs('electrode_table', kwargs)
     self.waveform_rate = getargs('waveform_rate', kwargs)
     self.waveform_unit = getargs('waveform_unit', kwargs)
     self.resolution = getargs('resolution', kwargs)
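A hedged usage sketch of the Units table built by the constructor above, assuming the standard pynwb API; the spike times are illustrative.

from pynwb.misc import Units

units = Units()  # name is assumed to default to 'Units'; description falls back to "data on spiking units"
units.add_unit(spike_times=[0.1, 0.2, 0.35])  # adds one row with a ragged spike_times column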
Example #4
 def create_electrode_table_region(self, **kwargs):
     if self.electrodes is None:
         msg = "no electrodes available. add electrodes before creating a region"
         raise RuntimeError(msg)
     region = getargs('region', kwargs)
     for idx in region:
         if idx < 0 or idx >= len(self.electrodes):
             raise IndexError('The index ' + str(idx) +
                              ' is out of range for the ElectrodeTable of length '
                              + str(len(self.electrodes)))
     desc = getargs('description', kwargs)
     name = getargs('name', kwargs)
     return DynamicTableRegion(name, region, desc, self.electrodes)
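A hedged usage sketch of the method above; it assumes an NWBFile (here nwbfile) whose electrode table already contains at least three rows.

region = nwbfile.create_electrode_table_region(
    region=[0, 1, 2],
    description='the first three electrodes',
)
# Indices outside 0..len(nwbfile.electrodes)-1 raise IndexError, per the check above.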
Example #5
 def add_container(self, **kwargs):
     '''
     Add an NWBContainer to this ProcessingModule
     '''
     container = getargs('container', kwargs)
     warn(PendingDeprecationWarning('add_container will be replaced by add'))
     self.add(container)
Example #6
 def add_features(self, **kwargs):
     time, features = getargs('time', 'features', kwargs)
     if type(self.timestamps) is list and type(self.data) is list:
         self.timestamps.append(time)
         self.data.append(features)
     else:
         raise ValueError('Can only add feature if timestamps and data are lists')
Example #7
def flatten_column_index(**kwargs):
    """
    Flatten the column index of a pandas DataFrame.

    The function changes the dataframe.columns from a pandas.MultiIndex to a normal Index,
    with each column then usually identified by a tuple of strings. This function is
    typically used in conjunction with DataFrames generated
    by :py:meth:`~hdmf.common.hierarchicaltable.to_hierarchical_dataframe`.

    :raises ValueError: If max_levels is not greater than 0.
    :raises TypeError: If the dataframe parameter is not a pandas.DataFrame.
    """
    dataframe, max_levels, inplace = getargs('dataframe', 'max_levels', 'inplace', kwargs)
    if max_levels is not None and max_levels <= 0:
        raise ValueError('max_levels must be greater than 0')
    # Compute the new column names
    col_names = [__flatten_column_name(col) for col in dataframe.columns.values]
    # Apply the max_levels filter. Make sure to do this only for columns that are actually tuples
    # in order not to accidentally shorten the actual string name of columns
    if max_levels is None:
        select_levels = slice(None)
    elif max_levels == 1:
        select_levels = -1
    else:  # max_levels > 1
        select_levels = slice(-max_levels, None)
    col_names = [col[select_levels] if isinstance(col, tuple) else col for col in col_names]
    re = dataframe if inplace else dataframe.copy()
    re.columns = col_names
    return re
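To make the max_levels trimming concrete, here is a minimal, self-contained sketch of the same slicing logic in plain pandas; the DataFrame and its MultiIndex columns are illustrative and no hdmf internals are used.

import pandas as pd

df = pd.DataFrame(
    [[1, 2], [3, 4]],
    columns=pd.MultiIndex.from_tuples([('trials', 'id'), ('units', 'spike_times')]),
)

max_levels = 1
cols = [tuple(col) for col in df.columns.values]   # e.g. ('trials', 'id')
if max_levels == 1:
    cols = [col[-1] for col in cols]               # keep only the last level
else:
    cols = [col[-max_levels:] for col in cols]     # keep the last max_levels levels

flat = df.copy()
flat.columns = cols
print(list(flat.columns))                          # ['id', 'spike_times']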
Example #8
 def add_scratch(self, **kwargs):
     '''Add data to the scratch space.'''
     data, name, description = getargs('data', 'name', 'description', kwargs)
     if isinstance(data, (str, int, float, bytes, np.ndarray, list, tuple, pd.DataFrame)):
         if name is None:
             msg = (
                 'A name is required for NWBFile.add_scratch when adding a scalar, numpy.ndarray, '
                 'list, tuple, or pandas.DataFrame as scratch data.')
             raise ValueError(msg)
         if description is None:
             msg = (
                 'A description is required for NWBFile.add_scratch when adding a scalar, numpy.ndarray, '
                 'list, tuple, or pandas.DataFrame as scratch data.')
             raise ValueError(msg)
         if isinstance(data, pd.DataFrame):
             data = DynamicTable.from_dataframe(
                 df=data, name=name, table_description=description)
         else:
             data = ScratchData(name=name,
                                data=data,
                                description=description)
     else:
         if name is not None:
             warn(
                 'The name argument is ignored when adding an NWBContainer, ScratchData, or '
                 'DynamicTable to scratch.')
         if description is not None:
             warn(
                 'The description argument is ignored when adding an NWBContainer, ScratchData, or '
                 'DynamicTable to scratch.')
     return self._add_scratch(data)
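A hedged usage sketch of the add_scratch variant above; the NWBFile metadata values are placeholders.

from datetime import datetime
import numpy as np
from pynwb import NWBFile

nwbfile = NWBFile(session_description='demo session', identifier='demo-id',
                  session_start_time=datetime.now().astimezone())
# Scalars, arrays, lists, tuples, and DataFrames all require a name and a description.
nwbfile.add_scratch(np.arange(5), name='intermediate',
                    description='intermediate analysis values')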
Example #9
 def __init__(self, **kwargs):
     name, my_data, attr1, attr2, attr3 = getargs('name', 'my_data', 'attr1', 'attr2', 'attr3', kwargs)
     super().__init__(name=name)
     self.__data = my_data
     self.__attr1 = attr1
     self.__attr2 = attr2
     self.__attr3 = attr3
Example #10
 def __init__(self, **kwargs):
     name, bars = getargs('name', 'bars', kwargs)
     super().__init__(name=name)
     self.__bars = bars
     for b in bars:
         if b is not None and b.parent is None:
             b.parent = self
Example #11
 def __init__(self, **kwargs):
     name, attr1, attr2, ext_attr = getargs('name', 'attr1', 'attr2',
                                            'ext_attr', kwargs)
     super().__init__(name=name)
     self.__attr1 = attr1
     self.__attr2 = attr2
     self.__ext_attr = ext_attr
Example #12
 def get_container(self, **kwargs):
     '''
     Retrieve an NWBContainer from this ProcessingModule
     '''
     container_name = getargs('container_name', kwargs)
     warn(PendingDeprecationWarning('get_container will be replaced by get'))
     return self.get(container_name)
Example #13
def get_type_map(**kwargs):
    '''
    Get a TypeMap to use for I/O using the given extensions. If no extensions are provided,
    return a TypeMap that uses the core namespace.
    '''
    extensions = getargs('extensions', kwargs)
    type_map = None
    if extensions is None:
        type_map = deepcopy(__TYPE_MAP)
    else:
        if isinstance(extensions, TypeMap):
            type_map = extensions
        else:
            type_map = deepcopy(__TYPE_MAP)
        if isinstance(extensions, list):
            for ext in extensions:
                if isinstance(ext, str):
                    type_map.load_namespaces(ext)
                elif isinstance(ext, TypeMap):
                    type_map.merge(ext)
                else:
                    raise ValueError(
                        'extensions must be a list of paths to namespace specs or TypeMap objects'
                    )
        elif isinstance(extensions, str):
            type_map.load_namespaces(extensions)
        elif isinstance(extensions, TypeMap):
            type_map.merge(extensions)
    return type_map
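A hedged usage sketch of get_type_map; the extension namespace path is a placeholder.

from pynwb import get_type_map

tm_core = get_type_map()                                            # core namespace only
tm_ext = get_type_map(extensions=['my-extension.namespace.yaml'])   # plus an extension (path is illustrative)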
Example #14
    def __init__(self, **kwargs):
        bits_per_pixel, dimension, external_file, starting_frame, format, device = popargs(
            'bits_per_pixel', 'dimension', 'external_file', 'starting_frame', 'format', 'device', kwargs)
        name, data, unit = getargs('name', 'data', 'unit', kwargs)
        if data is not None and unit is None:
            raise ValueError("Must supply 'unit' argument when supplying 'data' to %s '%s'."
                             % (self.__class__.__name__, name))
        if external_file is None and data is None:
            raise ValueError("Must supply either external_file or data to %s '%s'."
                             % (self.__class__.__name__, name))

        # data and unit are required in TimeSeries, but allowed to be None here, so handle this specially
        if data is None:
            kwargs['data'] = ImageSeries.DEFAULT_DATA
        if unit is None:
            kwargs['unit'] = ImageSeries.DEFAULT_UNIT

        call_docval_func(super(ImageSeries, self).__init__, kwargs)

        self.bits_per_pixel = bits_per_pixel
        self.dimension = dimension
        self.external_file = external_file
        if external_file is not None:
            self.starting_frame = starting_frame
        else:
            self.starting_frame = None
        self.format = format
        self.device = device
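A hedged sketch of the external_file/data rule enforced above, assuming the standard pynwb ImageSeries API; the file path and rate are placeholders.

from pynwb.image import ImageSeries

behavior_video = ImageSeries(
    name='behavior_video',
    external_file=['video.mp4'],   # data may then be omitted; unit falls back to DEFAULT_UNIT
    starting_frame=[0],
    format='external',
    rate=30.0,
)
# Supplying neither external_file nor data, or data without unit, raises ValueError as above.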
Example #15
def get_class(**kwargs):
    """
    Parse the YAML file for a given neurodata_type that is a subclass of NWBContainer and automatically generate its
    python API. This will work for most containers, but is known to not work for descendants of MultiContainerInterface
    and DynamicTable, so these must be defined manually (for now). `get_class` infers the API mapping directly from the
    specification. If you want to define a custom mapping, you should not use this function and you should define the
    class manually.

    Examples
    --------
    Generating and registering an extension is as simple as::

        MyClass = get_class('MyClass', 'ndx-my-extension')

    `get_class` defines only the `__init__` for the class. In cases where you want to provide additional methods for
    querying, plotting, etc. you can still use `get_class` and attach methods to the class after-the-fact, e.g.::

        def get_sum(self, a, b):
            return self.feat1 + self.feat2

        MyClass.get_sum = get_sum

    """
    neurodata_type, namespace = getargs('neurodata_type', 'namespace', kwargs)
    return __TYPE_MAP.get_container_cls(namespace, neurodata_type)
Example #16
def validate(**kwargs):
    """Validate an NWB file against a namespace"""
    io, namespace = getargs('io', 'namespace', kwargs)
    builder = io.read_builder()
    validator = ValidatorMap(
        io.manager.namespace_catalog.get_namespace(name=namespace))
    return validator.validate(builder)
Example #17
 def add_annotation(self, **kwargs):
     '''
     Add an annotation
     '''
     time, annotation = getargs('time', 'annotation', kwargs)
     self.fields['timestamps'].append(time)
     self.fields['data'].append(annotation)
Example #18
    def test_many_args_get_all(self):
        kwargs = {'a': 1, 'b': None, 'c': 3}
        expected_kwargs = kwargs.copy()

        res = getargs('a', 'b', 'c', kwargs)
        self.assertListEqual(res, [1, None, 3])
        self.assertDictEqual(kwargs, expected_kwargs)
Example #19
 def add_row(self, **kwargs):
     values = getargs('values', kwargs)
     if not isinstance(self.data, list):
         msg = 'Cannot append row to %s' % type(self.data)
         raise ValueError(msg)
     ret = len(self.data)
     self.data.append(tuple(values[col] for col in self.columns))
     return ret
Example #20
 def test_many_args_unpack(self):
     kwargs = {'a': 1, 'b': None, 'c': 3}
     expected_kwargs = kwargs.copy()
     res1, res2, res3 = getargs('a', 'b', 'c', kwargs)
     self.assertEqual(res1, 1)
     self.assertEqual(res2, None)
     self.assertEqual(res3, 3)
     self.assertDictEqual(kwargs, expected_kwargs)
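The two getargs tests above (Examples #18 and #20) exercise the same contract, summarized in this minimal sketch: getargs returns a single value for one name and a list for several, and it does not mutate kwargs.

from hdmf.utils import getargs

kwargs = {'a': 1, 'b': None, 'c': 3}
a = getargs('a', kwargs)                  # single name -> single value
a, b, c = getargs('a', 'b', 'c', kwargs)  # several names -> list, unpackable
assert (a, b, c) == (1, None, 3) and kwargs == {'a': 1, 'b': None, 'c': 3}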
Example #21
 def __init__(self, **kwargs):
     name, foos = getargs('name', 'foos', kwargs)
     super().__init__(name=name)
     self.__foos = {f.name: f for f in foos}  # note: collections of groups are unordered in HDF5
     for f in foos:
         f.parent = self
Example #22
 def __init__(self, **kwargs):
     group = getargs('group', kwargs)
     self.__group = group
     super_kwargs = {
         'source': "%s:%s" % (os.path.abspath(group.file.name), group.name)
     }
     call_docval_func(super(H5SpecReader, self).__init__, super_kwargs)
     self.__cache = None
Example #23
 def __init__(self, **kwargs):
     name, bazs = getargs('name', 'bazs', kwargs)
     super().__init__(name=name)
     self.__bazs = {b.name: b for b in bazs}  # note: collections of groups are unordered in HDF5
     for b in bazs:
         b.parent = self
Example #24
    def __init__(self, **kwargs):
        """Create a TimeSeries object
        """
        pargs, pkwargs = fmt_docval_args(super(TimeSeries, self).__init__, kwargs)
        super(TimeSeries, self).__init__(*pargs, **pkwargs)
        keys = ("resolution",
                "comments",
                "description",
                "conversion",
                "unit",
                "control",
                "control_description")
        for key in keys:
            val = kwargs.get(key)
            if val is not None:
                setattr(self, key, val)

        data = getargs('data', kwargs)
        self.fields['data'] = data
        if isinstance(data, TimeSeries):
            data.__add_link('data_link', self)
            self.fields['num_samples'] = data.num_samples
        elif isinstance(data, AbstractDataChunkIterator):
            self.fields['num_samples'] = -1
        elif isinstance(data, DataIO):
            this_data = data.data
            if isinstance(this_data, AbstractDataChunkIterator):
                self.fields['num_samples'] = -1
            else:
                self.fields['num_samples'] = len(this_data)
        elif data is None:
            self.fields['num_samples'] = 0
        else:
            self.fields['num_samples'] = len(data)

        timestamps = kwargs.get('timestamps')
        starting_time = kwargs.get('starting_time')
        rate = kwargs.get('rate')
        if timestamps is not None:
            if rate is not None:
                raise ValueError('Specifying rate and timestamps is not supported.')
            if starting_time is not None:
                raise ValueError('Specifying starting_time and timestamps is not supported.')
            self.fields['timestamps'] = timestamps
            self.timestamps_unit = 'Seconds'
            self.interval = 1
            if isinstance(timestamps, TimeSeries):
                timestamps.__add_link('timestamp_link', self)
        elif rate is not None:
            self.rate = rate
            if starting_time is not None:
                self.starting_time = starting_time
                self.starting_time_unit = 'Seconds'
            else:
                self.starting_time = 0.0
        else:
            raise TypeError("either 'timestamps' or 'rate' must be specified")
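A hedged usage sketch of the timestamps/rate rule enforced above; names and values are illustrative.

from pynwb import TimeSeries

ts_rate = TimeSeries(name='speed', data=[0.1, 0.2, 0.3], unit='m/s',
                     rate=10.0, starting_time=0.0)       # regularly sampled
ts_stamps = TimeSeries(name='speed_irregular', data=[0.1, 0.2, 0.3], unit='m/s',
                       timestamps=[0.0, 0.12, 0.31])     # explicit timestamps
# Passing both rate and timestamps raises ValueError; passing neither raises TypeError.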
Example #25
 def set_electrode_table(self, **kwargs):
     """
     Set the electrode table of this NWBFile to an existing ElectrodeTable
     """
     if self.electrodes is not None:
         msg = 'ElectrodeTable already exists, cannot overwrite'
         raise ValueError(msg)
     electrode_table = getargs('electrode_table', kwargs)
     self.electrodes = electrode_table
Example #26
 def get_attr_value(self, **kwargs):
     ''' Get the value of the attribute corresponding to this spec from the given container '''
     spec, container, manager = getargs('spec', 'container', 'manager',
                                        kwargs)
     # handle custom mapping of field 'ext_attr' within container BarHolder/Bar -> spec BarHolder/Bar.ext_attr
     if isinstance(container.parent, BarHolder):
         if spec.name == 'ext_attr':
             return container.ext_attr
     return super().get_attr_value(**kwargs)
Example #27
 def get_scratch(self, **kwargs):
     '''Get data from the scratch space'''
     name, convert = getargs('name', 'convert', kwargs)
     ret = self._get_scratch(name)
     if convert:
         if isinstance(ret, DynamicTable):
             ret = ret.to_dataframe()
         elif isinstance(ret, ScratchData):
             ret = np.asarray(ret.data)
     return ret
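A hedged sketch pairing with the add_scratch examples; it assumes an NWBFile, here nwbfile, that already holds scratch data named 'intermediate' (as in the sketch after Example #8).

arr = nwbfile.get_scratch('intermediate')                       # converted (e.g. to numpy.ndarray)
container = nwbfile.get_scratch('intermediate', convert=False)  # the stored container itself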
Example #28
 def __init__(self, **kwargs):
     imaging_plane, reference_images = popargs('imaging_plane', 'reference_images', kwargs)
     if kwargs.get('name') is None:
         kwargs['name'] = imaging_plane.name
     columns, colnames = getargs('columns', 'colnames', kwargs)
     call_docval_func(super(PlaneSegmentation, self).__init__, kwargs)
     self.imaging_plane = imaging_plane
     if isinstance(reference_images, ImageSeries):
         reference_images = (reference_images,)
     self.reference_images = reference_images
Example #29
        def __init__(self, **kwargs):
            super(HTKChannelIterator, self).__init__()
            self.data = getargs('data', kwargs)
            self.__dtype = getargs('dtype', kwargs)
            self.current_fileindex = 0
            self.time_axis_first = getargs('time_axis_first', kwargs)
            self.__maxshape = list(getargs('maxshape', kwargs))
            self.__has_bands = getargs('has_bands', kwargs)
            if self.time_axis_first:
                # Swap the axes on the shape and maxshape
                self.shape = (self.data.shape[1], self.data.shape[0], self.data.shape[2])
                self.__maxshape[0], self.__maxshape[1] = self.__maxshape[1], self.__maxshape[0]
                self.__maxshape = tuple(self.__maxshape)
                if not self.__has_bands:
                    self.shape = self.shape[0:2]
                    self.__maxshape = self.__maxshape[0:2]

            else:
                self.shape = self.data.shape
Example #30
 def add(self, **kwargs):
     '''
     Add a container to this LabelledDict
     '''
     container = getargs('container', kwargs)
     key = getattr(container, self.__defkey, None)
     if key is None:
         msg = "container '%s' does not have attribute '%s'" % (container.name, self.__defkey)
         raise ValueError(msg)
     self[key] = container
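A minimal sketch of add(), assuming hdmf's LabelledDict with its default key attribute 'name'; SimpleNamespace stands in for a real container object.

from types import SimpleNamespace
from hdmf.utils import LabelledDict

d = LabelledDict(label='containers')        # key_attr is assumed to default to 'name'
obj = SimpleNamespace(name='my_container')  # any object with a 'name' attribute
d.add(obj)                                  # stored under d['my_container']
assert d['my_container'] is obj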