Example #1
    def __init__(self):
        """
        Attributes
        ----------
        spatial : NSpatial
            Spatial data object
        spike : NSpike
            Spike data object
        lfp : NLfp
            LFP data object
        hdf : Nhdf
            Object for manipulating HDF5 file
        data_format : str
            Recording system or format of the data file

        """

        super().__init__()
        self.spike = NSpike(name='C0')
        self.spatial = NSpatial(name='S0')
        self.lfp = NLfp(name='L0')
        self.data_format = 'Axona'
        self._results = oDict()
        self.hdf = Nhdf()

        self.__type = 'data'
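
For context, a hedged sketch of how this constructor is typically driven; the setter and load methods are defined on NData in Example #7, and the Axona filenames here are hypothetical.

# Minimal NData setup sketch (hypothetical filenames).
ndata = NData()
ndata.set_data_format('Axona')
ndata.set_spike_file('session.2')        # tetrode file
ndata.set_spatial_file('session_2.txt')  # position file
ndata.set_lfp_file('session.eeg')        # LFP file
ndata.load()  # loads spike, spatial and LFP in one call
ndata.set_unit_no(1)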
Example #2
    def load_spatial(self, *args, **kwargs):
        """
        Call the NeuroChaT NSpatial.load method.

        Returns
        -------
        dict
            The keys of this dictionary are saved as attributes
            in simuran.spatial.Spatial.load()
        """
        self.spatial = NSpatial()
        self.spatial.load(*args, self.load_params["system"])
        return {
            "underlying": self.spatial,
            "date": self.spatial.get_date(),
            "time": self.spatial.get_time(),
            "speed": self.spatial.get_speed() * (u.cm / u.s),
            "position": (
                self.spatial.get_pos_x() * u.cm,
                self.spatial.get_pos_y() * u.cm,
            ),
            "direction": self.spatial.get_direction() * u.deg,
        }
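
A minimal usage sketch for this loader method; NCLoader and its "system" load parameter appear in full in Example #8, and the position filename is hypothetical.

# Sketch: load spatial data and read back the unit-tagged attributes.
loader = NCLoader(load_params={"system": "Axona"})
attrs = loader.load_spatial("session_pos.txt")  # hypothetical filename
x_cm, y_cm = attrs["position"]  # astropy Quantity arrays in cm
speed = attrs["speed"]          # astropy Quantity in cm / s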
Example #3
    def __init__(self):
        """See NData class description."""
        super().__init__()
        self.spike = NSpike(name='C0')
        self.spatial = NSpatial(name='S0')
        self.lfp = NLfp(name='L0')
        self.data_format = 'Axona'
        self._results = oDict()
        self.hdf = Nhdf()

        self.__type = 'data'
Example #4
def downsample_place(self, ftimes, other_spatial, other_ftimes, **kwargs):
    """
    Compute all place cell statistics after downsampling.

    Downsampling is performed by bin_downsample.

    Calculates the two-dimensional firing rate of the unit with respect to
    the location of the animal in the environment.
    This is called the firing map.

    Specificity indices are measured to assess the quality of
    location-specific firing of the unit.

    This method also plots the spike events superimposed on the trace of
    the animal in the arena, commonly known as a spike plot.

    Parameters
    ----------
    ftimes : ndarray
        Timestamps of the spiking activity of a unit
    other_spatial : NSpatial
        The spatial data to downsample to.
    other_ftimes : list or ndarray
        The firing times of the cell in other_spatial.
    **kwargs
        Keyword arguments

    Returns
    -------
    dict
        Graphical data of the analysis

    """
    _results = oDict()
    graph_data = {}
    update = kwargs.get('update', True)
    pixel = kwargs.get('pixel', 3)
    filttype, filtsize = kwargs.get('filter', ['b', 5])
    lim = kwargs.get('range', [0, self.get_duration()])
    brAdjust = kwargs.get('brAdjust', True)
    thresh = kwargs.get('fieldThresh', 0.2)
    required_neighbours = kwargs.get('minPlaceFieldNeighbours', 9)
    smooth_place = kwargs.get('smoothPlace', False)
    # Can pass another NData object to estimate the border from
    # Can be useful in some cases, such as when the animal
    # only explores a subset of the arena.
    separate_border_data = kwargs.get(
        "separateBorderData", None)

    # Update the border to match the requested pixel size
    if separate_border_data is not None:
        self.set_border(
            separate_border_data.calc_border(**kwargs))
        times = self._time
        lower, upper = (times.min(), times.max())
        new_times = separate_border_data._time
        sample_spatial_idx = (
            (new_times <= upper) & (new_times >= lower)).nonzero()
        self._border_dist = self._border_dist[sample_spatial_idx]
    else:
        self.set_border(self.calc_border(**kwargs))

    xedges = self._xbound
    yedges = self._ybound
    xedges2 = other_spatial._xbound
    yedges2 = other_spatial._ybound

    spikeLoc = self.get_event_loc(ftimes, **kwargs)[1]
    posX = self._pos_x[np.logical_and(
        self.get_time() >= lim[0], self.get_time() <= lim[1])]
    posY = self._pos_y[np.logical_and(
        self.get_time() >= lim[0], self.get_time() <= lim[1])]

    # This is the main difference from the regular place method.
    new_set, spike_count = self.bin_downsample(
        ftimes, other_spatial, other_ftimes,
        final_bins=[
            np.append(yedges, yedges[-1] + np.mean(np.diff(yedges))),
            np.append(xedges, xedges[-1] + np.mean(np.diff(xedges)))],
        sample_bin_amt=[len(xedges2) + 1, len(yedges2) + 1])
    posY = new_set[:, 1]
    posX = new_set[:, 0]

    tmap, yedges, xedges = histogram2d(posY, posX, yedges, xedges)
    if tmap.shape != spike_count.shape:
        print(tmap.shape)
        print(spike_count.shape)
        raise ValueError("Time map does not match firing map")

    tmap /= self.get_sampling_rate()

    ybin, xbin = tmap.shape
    xedges = np.arange(xbin) * pixel
    yedges = np.arange(ybin) * pixel

    fmap = np.divide(spike_count, tmap, out=np.zeros_like(
        spike_count), where=tmap != 0)
    # Guard against an all-zero firing map before normalising below;
    # callers (see Example #5) treat a -1 return as "retry the downsample".
    if fmap.max() == 0:
        print("No firing information!")
        return -1

    if brAdjust:
        nfmap = fmap / fmap.max()
        if np.sum(np.logical_and(nfmap >= 0.2, tmap != 0)) >= 0.8 * nfmap[tmap != 0].flatten().shape[0]:
            back_rate = np.mean(
                fmap[np.logical_and(nfmap >= 0.2, nfmap < 0.4)])
            fmap -= back_rate
            fmap[fmap < 0] = 0

    if filttype is not None:
        smoothMap = smooth_2d(fmap, filttype, filtsize)
    else:
        smoothMap = fmap

    if smooth_place:
        pmap = smoothMap
    else:
        pmap = fmap

    pmap[tmap == 0] = None
    pfield, largest_group = NSpatial.place_field(
        pmap, thresh, required_neighbours)

    centroid = NSpatial.place_field_centroid(pfield, pmap, largest_group)
    # centroid is currently in co-ordinates, convert to pixels
    centroid = centroid * pixel + (pixel * 0.5)
    # flip x and y
    centroid = centroid[::-1]

    p_shape = pfield.shape
    maxes = [xedges.max(), yedges.max()]
    scales = (
        maxes[0] / p_shape[1],
        maxes[1] / p_shape[0])
    co_ords = np.array(np.where(pfield == largest_group))
    boundary = [[None, None], [None, None]]
    for i in range(2):
        j = (i + 1) % 2
        boundary[i] = (
            co_ords[j].min() * scales[i],
            np.clip((co_ords[j].max() + 1) * scales[i], 0, maxes[i]))
    inside_x = (
        (boundary[0][0] <= spikeLoc[0]) &
        (spikeLoc[0] <= boundary[0][1]))
    inside_y = (
        (boundary[1][0] <= spikeLoc[1]) &
        (spikeLoc[1] <= boundary[1][1]))
    co_ords = np.nonzero(np.logical_and(inside_x, inside_y))

    if update:
        _results['Spatial Skaggs'] = self.skaggs_info(fmap, tmap)
        _results['Spatial Sparsity'] = self.spatial_sparsity(fmap, tmap)
        _results['Spatial Coherence'] = np.corrcoef(
            fmap[tmap != 0].flatten(), smoothMap[tmap != 0].flatten())[0, 1]
        _results['Found strong place field'] = (largest_group != 0)
        _results['Place field Centroid x'] = centroid[0]
        _results['Place field Centroid y'] = centroid[1]
        _results['Place field Boundary x'] = boundary[0]
        _results['Place field Boundary y'] = boundary[1]
        _results['Number of Spikes in Place Field'] = co_ords[0].size
        _results['Percentage of Spikes in Place Field'] = co_ords[0].size * \
            100 / ftimes.size
        self.update_result(_results)

    smoothMap[tmap == 0] = None

    graph_data['posX'] = posX
    graph_data['posY'] = posY
    graph_data['fmap'] = fmap
    graph_data['smoothMap'] = smoothMap
    graph_data['firingMap'] = fmap
    graph_data['tmap'] = tmap
    graph_data['xedges'] = xedges
    graph_data['yedges'] = yedges
    graph_data['spikeLoc'] = spikeLoc
    graph_data['placeField'] = pfield
    graph_data['largestPlaceGroup'] = largest_group
    graph_data['placeBoundary'] = boundary
    graph_data['indicesInPlaceField'] = co_ords
    graph_data['centroid'] = centroid

    return graph_data
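
A hedged sketch of driving this method, mirroring the retry loop in Example #5; spat1 and spat2 are loaded NSpatial objects, ftimes1 and ftimes2 are the unit firing times in each session, and all names are assumptions.

# Retry the random downsample until it yields a non-empty firing map.
p_down_data = -1
while p_down_data == -1:  # -1 signals an empty downsampled firing map
    p_down_data = spat1.downsample_place(ftimes1, spat2, ftimes2)
skaggs = spat1.get_results()["Spatial Skaggs"]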
Example #5
        while p_down_data == -1:
            p_down_data = spat1.downsample_place(ftimes1, spat2, ftimes2)
        for key in keys:
            results[key][i] = spat1.get_results()[key]
        spat1._results.clear()
    output_dict = {}
    for key in keys:
        output_dict[key] = np.nanmean(results[key])
    return output_dict, p_down_data


if __name__ == "__main__":
    """Some examples for testing the code on for correctness."""

    # Set up the recordings
    spatial = NSpatial()
    fname = r"D:\SubRet_recordings_imaging\muscimol_data\CanCSR8_muscimol\05102018\s3_after_smallsq\05102018_CanCSR8_smallsq_10_3_3.txt"
    spatial.set_filename(fname)
    spatial.load()

    spike = NSpike()
    fname = r"D:\SubRet_recordings_imaging\muscimol_data\CanCSR8_muscimol\05102018\s3_after_smallsq\05102018_CanCSR8_smallsq_10_3.3"
    spike.set_filename(fname)
    spike.load()
    spike.set_unit_no(1)

    spatial2 = NSpatial()
    fname = r"D:\SubRet_recordings_imaging\muscimol_data\CanCSR8_muscimol\05102018\s4_big sq\05102018_CanCSR8_bigsq_10_4_3.txt"
    spatial2.set_filename(fname)
    spatial2.load()
Example #6
def test_nc_recording_loading(delete=False):
    from neurochat.nc_lfp import NLfp
    from neurochat.nc_spike import NSpike
    from neurochat.nc_spatial import NSpatial
    from simuran.loaders.nc_loader import NCLoader

    main_test_dir = os.path.join(main_dir, "tests", "resources", "temp",
                                 "axona")
    os.makedirs(main_test_dir, exist_ok=True)

    axona_files = fetch_axona_data()

    # Load using SIMURAN auto detection.
    ex = Recording(
        param_file=os.path.join(main_dir, "tests", "resources", "params",
                                "axona_test.py"),
        base_file=main_test_dir,
        load=False,
    )
    ex.signals[0].load()
    ex.units[0].load()
    ex.units[0].underlying.set_unit_no(1)
    ex.spatial.load()

    # Load using NeuroChaT
    lfp = NLfp()
    lfp.set_filename(
        os.path.join(main_test_dir, "010416b-LS3-50Hz10.V5.ms.eeg"))
    lfp.load(system="Axona")

    unit = NSpike()
    unit.set_filename(os.path.join(main_test_dir,
                                   "010416b-LS3-50Hz10.V5.ms.2"))
    unit.load(system="Axona")
    unit.set_unit_no(1)

    spatial = NSpatial()
    spatial.set_filename(
        os.path.join(main_test_dir, "010416b-LS3-50Hz10.V5.ms_2.txt"))
    spatial.load(system="Axona")

    assert np.all(ex.signals[0].underlying.get_samples() == lfp.get_samples())
    assert np.all(
        ex.units[0].underlying.get_unit_stamp() == unit.get_unit_stamp())
    assert np.all(
        ex.units[0].underlying.get_unit_tags() == unit.get_unit_tags())
    assert np.all(ex.spatial.underlying.get_pos_x() == spatial.get_pos_x())

    ncl = NCLoader()
    ncl.load_params["system"] = "Axona"
    loc = os.path.join(main_dir, "tests", "resources", "temp", "axona")
    file_locs, _ = ncl.auto_fname_extraction(
        loc,
        verbose=False,
        unit_groups=[
            2,
        ],
        sig_channels=[
            1,
        ],
    )
    clust_locs = [
        os.path.basename(f) for f in file_locs["Clusters"] if f is not None
    ]
    assert "010416b-LS3-50Hz10.V5.ms_2.cut" in clust_locs

    if delete:
        for f in axona_files:
            os.remove(f)
Example #7
class NData():
    """
    The NData object is composed of data objects (NSpike(), NSpatial(), NLfp(),
    and Nhdf()) and is built upon the composite structural object pattern. 

    This data class is the main data element in NeuroChaT which delegates the 
    analysis and other operations to respective objects.

    """
    def __init__(self):
        """
        Attributes
        ----------
        spatial : NSpatial
            Spatial data object
        spike : NSpike
            Spike data object
        lfp : NLfp
            LFP data object
        hdf : Nhdf
            Object for manipulating HDF5 file
        data_format : str
            Recording system or format of the data file

        """

        super().__init__()
        self.spike = NSpike(name='C0')
        self.spatial = NSpatial(name='S0')
        self.lfp = NLfp(name='L0')
        self.data_format = 'Axona'
        self._results = oDict()
        self.hdf = Nhdf()

        self.__type = 'data'

    def subsample(self, sample_range):
        """
        Split up a data object in the collection into parts.

        Parameters
        ----------
        sample_range : tuple
            Times in seconds to extract

        Returns
        -------
        NData
            Subsampled version of the initial NData object
        """
        ndata = NData()
        if self.lfp.get_duration() != 0:
            ndata.lfp = self.lfp.subsample(sample_range)
        if self.spike.get_duration() != 0:
            ndata.spike = self.spike.subsample(sample_range)
        if self.spatial.get_duration() != 0:
            ndata.spatial = self.spatial.subsample(sample_range)

        return ndata

    def get_type(self):
        """
        Returns the type of object. For NData, this is always the `data` type

        Parameters
        ----------
        None

        Returns
        -------
        str

        """
        return self.__type

    def get_results(self, spaces_to_underscores=False):
        """
        Returns the parametric results of the analyses

        Parameters
        ----------
        spaces_to_underscores : bool
            If True, replaces spaces in the result keys with underscores

        Returns
        -------
        OrderedDict

        """
        if spaces_to_underscores:
            results = {
                x.replace(' ', '_'): v
                for x, v in self._results.items()
            }
            return results
        return self._results

    def update_results(self, results):
        """
        Adds new parametric results of the analyses

        Parameters
        ----------
        results : OrderedDict
            New analyses results (parametric)

        Returns
        -------
        None

        """

        self._results.update(results)

    def reset_results(self):
        """
        Reset the NData results to an empty OrderedDict

        Parameters
        ----------
        None

        Returns
        -------
        None

        """

        self._results = oDict()
        # self.spike.reset_results()
        # self.spatial.reset_results()
        # self.lfp.reset_results()

    def get_data_format(self):
        """
        Returns the recording system or data format

        Parameters
        ----------
        None

        Returns
        -------
        str

        """
        return self.data_format

    def set_data_format(self, data_format=None):
        """
        Sets the recording system or format of the data and propagates it
        to the constituent data objects

        Parameters
        ----------
        data_format : str
            Recording system or format of the data

        Returns
        -------
        None

        """

        if data_format is None:
            data_format = self.get_data_format()
        self.data_format = data_format
        self.spike.set_system(data_format)
        self.spatial.set_system(data_format)
        self.lfp.set_system(data_format)

    def load(self):
        """
        Loads the data from the filenames in each constituent object, i.e.,
        spatial, spike and LFP

        Parameters
        ----------
        None

        Returns
        -------
        None

        """
        self.load_spike()
        self.load_spatial()
        self.load_lfp()

    def save_to_hdf5(self):
        """
        Stores the spatial, spike and LFP datasets to the HDF5 file

        Parameters
        ----------
        None

        Returns
        -------
        None

        """

        try:
            self.hdf.save_object(obj=self.spike)
        except Exception:
            logging.warning(
                'Error in exporting NSpike data from NData object to the hdf5 file!'
            )

        try:
            self.hdf.save_object(obj=self.spatial)
        except Exception:
            logging.warning(
                'Error in exporting NSpatial data from NData object to the hdf5 file!'
            )

        try:
            self.hdf.save_object(obj=self.lfp)
        except Exception:
            logging.warning(
                'Error in exporting NLfp data from NData object to the hdf5 file!'
            )

    def set_unit_no(self, unit_no):
        """
        Sets the unit number of the spike dataset to analyse

        Parameters
        ----------
        unit_no : int
            Unit or cell number to analyse

        Returns
        -------
        None

        """

        self.spike.set_unit_no(unit_no)

    def set_spike_name(self, name='C0'):
        """
        Sets the name of the spike dataset

        Parameters
        ----------
        name : str
            Name of the spike dataset

        Returns
        -------
        None

        """

        self.spike.set_name(name)

    def set_spike_file(self, filename):
        """
        Sets the filename of the spike dataset

        Parameters
        ----------
        filename : str
            Full file directory of the spike dataset

        Returns
        -------
        None

        """

        self.spike.set_filename(filename)

    def get_spike_file(self):
        """
        Gets the filename of the spike dataset

        Parameters
        ----------
        None

        Returns
        -------
        str
            Filename of the spike dataset
        """

        return self.spike.get_filename()

    def load_spike(self):
        """
        Loads spike dataset from the file to NSpike() object

        Parameters
        ----------
        None

        Returns
        -------
        None

        """

        self.spike.load()

    def set_spatial_file(self, filename):
        """
        Sets the filename of the spatial dataset

        Parameters
        ----------
        filename : str
            Full file directory of the spatial dataset

        Returns
        -------
        None

        """
        self.spatial.set_filename(filename)

    def get_spatial_file(self):
        """
        Gets the filename of the spatial dataset

        Parameters
        ----------
        None

        Returns
        -------
        str
            Filename of the spatial dataset

        """
        return self.spatial.get_filename()

    def set_spatial_name(self, name):
        """
        Sets the name of the spatial dataset

        Parameters
        ----------
        name : str
            Name of the spatial dataset

        Returns
        -------
        None

        """

        self.spatial.set_name(name)

    def load_spatial(self):
        """
        Loads spatial dataset from the file to NSpatial() object

        Parameters
        ----------
        None

        Returns
        -------
        None

        """
        self.spatial.load()

    def set_lfp_file(self, filename):
        """
        Sets the filename of the LFP dataset

        Parameters
        ----------
        filename : str
            Full file directory of the LFP dataset

        Returns
        -------
        None

        """
        self.lfp.set_filename(filename)

    def get_lfp_file(self):
        """
        Gets the filename of the LFP dataset

        Parameters
        ----------
        None

        Returns
        -------
        str
            Filename of the LFP dataset
        """

        return self.lfp.get_filename()

    def set_lfp_name(self, name):
        """
        Sets the name of the NLfp() object

        Parameters
        ----------
        name : str
            Name of the LFP dataset

        Returns
        -------
        None

        """

        self.lfp.set_name(name)

    def load_lfp(self):
        """
        Loads LFP dataset to NLfp() object

        Parameters
        ----------
        None

        Returns
        -------
        None

        """

        self.lfp.load()

    # Forwarding to analysis
    def wave_property(self):
        """
        Analysis of waveform characteristics of the spikes of a unit

        Delegates to NSpike().wave_property()

        Parameters
        ----------
        None        

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spike.NSpike().wave_property

        """

        gdata = self.spike.wave_property()
        self.update_results(self.spike.get_results())

        return gdata

    def isi(self,
            bins='auto',
            bound=None,
            density=False,
            refractory_threshold=2):
        """
        Analysis of ISI histogram

        Delegates to NSpike().isi()

        Parameters
        ----------
        bins : str or int
            Number of ISI histogram bins. If 'auto', NumPy default is used

        bound : int
            Length of the ISI histogram in msec
        density : bool
            If true, a normalized histogram is calculated
        refractory_threshold : int
            Length of the refractory period in msec

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spike.NSpike().isi

        """
        gdata = self.spike.isi(bins, bound, density, refractory_threshold)
        self.update_results(self.spike.get_results())

        return gdata

    def isi_auto_corr(self, spike=None, **kwargs):
        """
        Analysis of ISI autocorrelation histogram

        Delegates to NSpike().isi_corr()

        Parameters
        ----------
        spike : NSpike()
            If specified, it calculates the cross-correlation.

        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spike.NSpike().isi_corr, nc_spike.NSpike().psth

        """

        gdata = self.spike.isi_corr(spike, **kwargs)

        return gdata

    def burst(self, burst_thresh=5, ibi_thresh=50):
        """
        Burst analysis of the spike-train

        Delegates to NSpike().burst()

        Parameters
        ----------
        burst_thresh : int
            Minimum ISI between consecutive spikes in a burst

        ibi_thresh : int
            Minimum inter-burst interval between two bursting groups of spikes

        Returns
        -------
        None

        See also
        --------
        nc_spike.NSpike().burst

        """

        self.spike.burst(burst_thresh, ibi_thresh=ibi_thresh)
        self.update_results(self.spike.get_results())

    def theta_index(self, **kwargs):
        """
        Calculates theta-modulation of spike-train ISI autocorrelation histogram.

        Delegates to NSpike().theta_index()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spike.NSpike().theta_index()

        """

        gdata = self.spike.theta_index(**kwargs)
        self.update_results(self.spike.get_results())

        return gdata

    def theta_skip_index(self, **kwargs):
        """
        Calculates theta-skipping of spike-train ISI autocorrelation histogram.

        Delegates to NSpike().theta_skip_index()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spike.NSpike().theta_skip_index()

        """

        gdata = self.spike.theta_skip_index(**kwargs)
        self.update_results(self.spike.get_results())

        return gdata

    def bandpower_ratio(self, first_band, second_band, win_sec, **kwargs):
        """
        Calculate the ratio in power between two bandpass filtered signals.

        Delegates to NLfp.bandpower_ratio()
        Suggested bands are [5, 11] and [1.5, 4]


        Parameters
        ----------
        first_band, second_band : list of float
            The two frequency bands to compare
        win_sec : float
            Window length in seconds
        **kwargs
            Keyword arguments

        See also
        --------
        nc_lfp.NLfp.bandpower_ratio()
        """

        self.lfp.bandpower_ratio(first_band, second_band, win_sec, **kwargs)
        self.update_results(self.lfp.get_results())

    def spectrum(self, **kwargs):
        """
        Analyses frequency spectrum of the LFP signal

        Delegates to NLfp().spectrum()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_lfp.NLfp().spectrum()

        """

        gdata = self.lfp.spectrum(**kwargs)

        return gdata

    def phase_dist(self, **kwargs):
        """
        Analysis of spike to LFP phase distribution

        Delegates to NLfp().phase_dist()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_lfp.NLfp().phase_dist()

        """

        gdata = self.lfp.phase_dist(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.lfp.get_results())

        return gdata

    def phase_at_spikes(self, **kwargs):
        """
        Analysis of the LFP phase at spike times

        Delegates to NLfp().phase_at_events()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Has keys "phases", "times", "positions" and "directions";
            place-field keys are added when should_filter is True

        See also
        --------
        nc_lfp.NLfp().phase_at_events()

        """
        key = "keep_zero_idx"
        out_data = {}
        if key not in kwargs:
            kwargs[key] = True
        should_filter = kwargs.get("should_filter", True)

        ftimes = self.spike.get_unit_stamp()
        phases = self.lfp.phase_at_events(ftimes, **kwargs)
        _, positions, directions = self.get_event_loc(ftimes, **kwargs)

        if should_filter:
            place_data = self.place(**kwargs)
            boundary = place_data["placeBoundary"]
            co_ords = place_data["indicesInPlaceField"]
            largest_group = place_data["largestPlaceGroup"]

            out_data["good_place"] = (largest_group != 0)
            out_data["phases"] = phases[co_ords]
            out_data["times"] = ftimes[co_ords]
            out_data["directions"] = directions[co_ords]
            out_data["positions"] = [
                positions[0][co_ords], positions[1][co_ords]
            ]
            out_data["boundary"] = boundary

        else:
            out_data["phases"] = phases
            out_data["times"] = ftimes
            out_data["positions"] = positions
            out_data["directions"] = directions

        self.update_results(self.get_results())
        return out_data

    def plv(self, **kwargs):
        """
        Calculates phase-locking value of the spike train to underlying LFP signal.

        Delegates to NLfp().plv()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_lfp.NLfp().plv()

        """

        gdata = self.lfp.plv(self.spike.get_unit_stamp(), **kwargs)

        return gdata

    # def sfc(self, **kwargs):
    # """
    # Calculates spike-field coherence of spike train with underlying LFP signal.

    # Delegates to NLfp().sfc()

    # Parameters
    # ----------
    # **kwargs
    #     Keyword arguments

    # Returns
    # -------
    # dict
    #     Graphical data of the analysis

    # See also
    # --------
    # nc_lfp.NLfp().sfc()

    # """

    # gdata = self.lfp.plv(self.spike.get_unit_stamp(), **kwargs)

    # return gdata

    def event_trig_average(self, **kwargs):
        """
        Averaging event-triggered LFP signals

        Delegates to NLfp().event_trig_average()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_lfp.NLfp().event_trig_average()

        """

        gdata = self.lfp.event_trig_average(self.spike.get_unit_stamp(),
                                            **kwargs)

        return gdata

    def spike_lfp_causality(self, **kwargs):
        """
        Analyses spike to underlying LFP causality

        Delegates to NLfp().spike_lfp_causality()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_lfp.NLfp().spike_lfp_causality()

        """

        gdata = self.lfp.spike_lfp_causality(self.spike.get_unit_stamp(),
                                             **kwargs)
        self.update_results(self.lfp.get_results())

        return gdata

    def speed(self, **kwargs):
        """
        Analysis of unit correlation with running speed

        Delegates to NSpatial().speed()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().speed()

        """

        gdata = self.spatial.speed(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def angular_velocity(self, **kwargs):
        """
        Analysis of unit correlation to angular head velocity (AHV) of the animal

        Delegates to NSpatial().angular_velocity()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().angular_velocity()

        """

        gdata = self.spatial.angular_velocity(self.spike.get_unit_stamp(),
                                              **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def place(self, **kwargs):
        """
        Analysis of place cell firing characteristics

        Delegates to NSpatial().place()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().place()

        """

        gdata = self.spatial.place(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    # Created by Sean Martin: 13/02/2019
    def place_field_centroid_zscore(self, **kwargs):
        """
        Calculates a very simple centroid of place field

        Delegates to NSpatial().place_field()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        ndarray
            Centroid of the place field

        See also
        --------
        nc_spatial.NSpatial().place_field()

        """

        gdata = self.spatial.place_field_centroid_zscore(
            self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def loc_time_lapse(self, **kwargs):
        """
        Time-lapse firing properties of the unit with respect to location

        Delegates to NSpatial().loc_time_lapse()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().loc_time_lapse()

        """
        gdata = self.spatial.loc_time_lapse(self.spike.get_unit_stamp(),
                                            **kwargs)

        return gdata

    def loc_shuffle(self, **kwargs):
        """
        Shuffling analysis of the unit to see if the locational firing specificity
        is by chance or actually correlated to the location of the animal

        Delegates to NSpatial().loc_shuffle()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().loc_shuffle()

        """

        gdata = self.spatial.loc_shuffle(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def loc_shift(self, shift_ind=np.arange(-10, 11), **kwargs):
        """
        Analysis of firing specificity of the unit with respect to the animal's
        location to observe whether it represents a past location of the animal
        or anticipates a future location.

        Delegates to NSpatial().loc_shift()

        Parameters
        ----------
        shift_ind : ndarray
            Index of spatial resolution shift for the spike event time. Shift -1
            implies a shift to the past by 1 spatial time resolution, and +2 implies
            a shift to the future by 2 spatial time resolutions.
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().loc_shift()

        """

        gdata = self.spatial.loc_shift(self.spike.get_unit_stamp(),
                                       shift_ind=shift_ind,
                                       **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def loc_auto_corr(self, **kwargs):
        """
        Calculates the two-dimensional correlation of the firing map, which is
        the map of the firing rate of the animal with respect to its location

        Delegates to NSpatial().loc_auto_corr()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().loc_auto_corr()

        """
        gdata = self.spatial.loc_auto_corr(self.spike.get_unit_stamp(),
                                           **kwargs)

        return gdata

    def loc_rot_corr(self, **kwargs):
        """
        Calculates the rotational correlation of the firing map, i.e. the
        locational firing rate of the animal

        Delegates to NSpatial().loc_rot_corr()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().loc_rot_corr()

        """

        gdata = self.spatial.loc_rot_corr(self.spike.get_unit_stamp(),
                                          **kwargs)

        return gdata

    def hd_rate(self, **kwargs):
        """
        Analysis of the firing characteristics of a unit with respect to animal's
        head-direction

        Delegates to NSpatial().hd_rate()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().hd_rate()

        """

        gdata = self.spatial.hd_rate(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def hd_rate_ccw(self, **kwargs):
        """
        Analysis of the firing characteristics of a unit with respect to the animal's
        head-direction split into clockwise and counterclockwise directions

        Delegates to NSpatial().hd_rate_ccw()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().hd_rate_ccw()

        """

        gdata = self.spatial.hd_rate_ccw(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def hd_time_lapse(self):
        """
        Time-lapse firing properties of the unit with respect to the
        head-direction of the animal

        Delegates to NSpatial().hd_time_lapse()

        Parameters
        ----------
        None

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().hd_time_lapse()

        """

        gdata = self.spatial.hd_time_lapse(self.spike.get_unit_stamp())

        return gdata

    def hd_shuffle(self, **kwargs):
        """
        Shuffling analysis of the unit to see if the head-directional firing
        specificity is by chance or actually correlated to the head-direction
        of the animal

        Delegates to NSpatial().hd_shuffle()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().hd_shuffle()

        """

        gdata = self.spatial.hd_shuffle(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def hd_shift(self, shift_ind=np.arange(-10, 11), **kwargs):
        """
        Analysis of firing specificity of the unit with respect to the animal's
        head direction to observe whether it represents a past direction or
        anticipates a future direction.

        Delegates to NSpatial().hd_shift()

        Parameters
        ----------
        shift_ind : ndarray
            Index of spatial resolution shift for the spike event time. Shift -1
            implies a shift to the past by 1 spatial time resolution, and +2 implies
            a shift to the future by 2 spatial time resolutions.
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().hd_shift()

        """

        gdata = self.spatial.hd_shift(self.spike.get_unit_stamp(),
                                      shift_ind=shift_ind)
        self.update_results(self.spatial.get_results())

        return gdata

    def border(self, **kwargs):
        """
        Analysis of the firing characteristic of a unit with respect to the
        environmental border

        Delegates to NSpatial().border()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().border()

        """

        gdata = self.spatial.border(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def gradient(self, **kwargs):
        """
        Analysis of gradient cell, a unit whose firing rate gradually increases
        as the animal traverses from the border to the center of the environment

        Delegates to NSpatial().gradient()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().gradient()

        """

        gdata = self.spatial.gradient(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def grid(self, **kwargs):
        """
        Analysis of grid cells, characterised by the formation of a grid-like
        pattern of high activity in the firing-rate map

        Delegates to NSpatial().grid()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().grid()

        """

        gdata = self.spatial.grid(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def multiple_regression(self, **kwargs):
        """
        Multiple-regression analysis where the firing rate for each variable, namely
        location, head-direction, speed, AHV, and distance from border, is used
        to regress the instantaneous firing rate of the unit.

        Delegates to NSpatial().multiple_regression()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        dict
            Graphical data of the analysis

        See also
        --------
        nc_spatial.NSpatial().multiple_regression()

        """
        gdata = self.spatial.multiple_regression(self.spike.get_unit_stamp(),
                                                 **kwargs)
        self.update_results(self.spatial.get_results())

        return gdata

    def interdependence(self, **kwargs):
        """
        Interdependence analysis where the firing rate of each variable is predicted
        from another variable and the distributive ratio is measured between the
        predicted firing rate and the calculated firing rate.

        Delegates to NSpatial().interdependence()

        Parameters
        ----------
        **kwargs
            Keyword arguments

        Returns
        -------
        None

        See also
        --------
        nc_spatial.NSpatial().interdependence()

        """

        self.spatial.interdependence(self.spike.get_unit_stamp(), **kwargs)
        self.update_results(self.spatial.get_results())

    def __getattr__(self, arg):
        """
        Sets precedence for delegation with NSpike() > NLfp() > NSpatial()

        Parameters
        ----------
        arg : str
            Name of the function or attribute to look for

        """

        if hasattr(self.spike, arg):
            return getattr(self.spike, arg)
        elif hasattr(self.lfp, arg):
            return getattr(self.lfp, arg)
        elif hasattr(self.spatial, arg):
            return getattr(self.spatial, arg)
        else:
            logging.warning(
                'No ' + arg +
                ' method or attribute in NeuroData or in composing data class')
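
Given a loaded instance (see the construction sketch after Example #1), a brief sketch of the delegation pattern this class implements; the method names are those defined above.

# Each call is forwarded to the relevant constituent object and the
# parametric results are accumulated on the NData instance.
wave_graphs = ndata.wave_property()  # forwarded to NSpike
place_graphs = ndata.place()         # forwarded to NSpatial
phase_graphs = ndata.phase_dist()    # forwarded to NLfp
results = ndata.get_results(spaces_to_underscores=True)
ndata.reset_results()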
Example #8
class NCLoader(BaseLoader):
    """Load data compatible with the NeuroChaT package."""
    def __init__(self, load_params=None):
        """Call super class initialize."""
        # Avoid a mutable default argument; a shared dict would leak
        # parameter changes across loader instances.
        super().__init__(load_params=load_params if load_params is not None else {})

    def load_signal(self, *args, **kwargs):
        """
        Call the NeuroChaT NLfp.load method.

        Returns
        -------
        dict
            The keys of this dictionary are saved as attributes
            in simuran.signal.BaseSignal.load()
        """
        self.signal = NLfp()
        self.signal.load(*args, self.load_params["system"])
        return {
            "underlying": self.signal,
            "timestamps": self.signal.get_timestamp() * u.s,
            "samples": self.signal.get_samples() * u.mV,
            "date": self.signal.get_date(),
            "time": self.signal.get_time(),
            "channel": self.signal.get_channel_id(),
        }

    def load_spatial(self, *args, **kwargs):
        """
        Call the NeuroChaT NSpatial.load method.

        Returns
        -------
        dict
            The keys of this dictionary are saved as attributes
            in simuran.spatial.Spatial.load()
        """
        self.spatial = NSpatial()
        self.spatial.load(*args, self.load_params["system"])
        return {
            "underlying": self.spatial,
            "date": self.spatial.get_date(),
            "time": self.spatial.get_time(),
            "speed": self.spatial.get_speed() * (u.cm / u.s),
            "position": (
                self.spatial.get_pos_x() * u.cm,
                self.spatial.get_pos_y() * u.cm,
            ),
            "direction": self.spatial.get_direction() * u.deg,
        }

    def load_single_unit(self, *args, **kwargs):
        """
        Call the NeuroChaT NSpike.load method.

        Returns
        -------
        dict
            The keys of this dictionary are saved as attributes
            in simuran.single_unit.SingleUnit.load()

        """
        fname, clust_name = args
        if clust_name is not None:
            self.single_unit = NSpike()
            self.single_unit.load(fname, self.load_params["system"])
            waveforms = deepcopy(self.single_unit.get_waveform())
            for chan, val in waveforms.items():
                waveforms[chan] = val * u.uV
            return {
                "underlying": self.single_unit,
                "timestamps": self.single_unit.get_timestamp() * u.s,
                "unit_tags": self.single_unit.get_unit_tags(),
                "waveforms": waveforms,
                "date": self.single_unit.get_date(),
                "time": self.single_unit.get_time(),
                "available_units": self.single_unit.get_unit_list(),
                # "units_to_use": self.single_unit.get_unit_list(),
            }
        else:
            return None

    def auto_fname_extraction(self, base, **kwargs):
        """
        Extract all filenames relevant to the recording from base.

        Parameters
        ----------
        base : str
            Where to start looking from.
            For Axona, this should be a .set file,
            or a directory containing exactly one .set file

        Returns
        -------
        fnames : dict
            A dictionary listing the filenames involved in loading.
        base : str
            The base file name, in Axona this is a .set file.

        TODO
        ----
        Expand to support nwb and neuralynx as well as Axona.

        """
        # Currently only implemented for Axona systems
        error_on_missing = self.load_params.get("enforce_data", True)

        if self.load_params["system"] == "Axona":

            # Find the set file if a directory is passed
            if os.path.isdir(base):
                set_files = get_all_files_in_dir(base, ext="set")
                if len(set_files) == 0:
                    print("WARNING: No set files found in {}, skipping".format(
                        base))
                    return None, None
                elif len(set_files) > 1:
                    raise ValueError(
                        "Found more than one set file, found {}".format(
                            len(set_files)))
                base = set_files[0]
            elif not os.path.isfile(base):
                raise ValueError("{} is not a file or directory".format(base))

            joined_params = {**self.load_params, **kwargs}
            cluster_extension = joined_params.get("cluster_extension", ".cut")
            clu_extension = joined_params.get("clu_extension", ".clu.X")
            pos_extension = joined_params.get("pos_extension", ".pos")
            lfp_extension = joined_params.get("lfp_extension",
                                              ".eeg")  # eeg or egf
            stm_extension = joined_params.get("stm_extension", ".stm")
            tet_groups = joined_params.get("unit_groups", None)
            channels = joined_params.get("sig_channels", None)

            filename = os.path.splitext(base)[0]
            base_filename = os.path.splitext(os.path.basename(base))[0]

            # Extract the tetrode and cluster data
            spike_names_all = []
            cluster_names_all = []
            if tet_groups is None:
                tet_groups = [
                    x for x in range(0, 64)
                    if os.path.exists(filename + "." + str(x))
                ]
            if channels is None:
                channels = [
                    x for x in range(2, 256)
                    if os.path.exists(filename + lfp_extension + str(x))
                ]
                if os.path.exists(filename + lfp_extension):
                    channels = [1] + channels
            for tetrode in tet_groups:
                spike_name = filename + "." + str(tetrode)
                if not os.path.isfile(spike_name):
                    e_msg = "Axona data is not available for {}".format(
                        spike_name)
                    if error_on_missing:
                        raise ValueError(e_msg)
                    else:
                        logging.warning(e_msg)
                        return None, base

                spike_names_all.append(spike_name)

                cut_name = filename + "_" + str(tetrode) + cluster_extension
                clu_name = filename + clu_extension[:-1] + str(tetrode)
                if os.path.isfile(cut_name):
                    cluster_name = cut_name
                elif os.path.isfile(clu_name):
                    cluster_name = clu_name
                else:
                    cluster_name = None
                cluster_names_all.append(cluster_name)

            # Extract the positional data
            output_list = [None, None]
            for i, ext in enumerate([pos_extension, stm_extension]):
                for fname in get_all_files_in_dir(
                        os.path.dirname(base),
                        ext=ext,
                        return_absolute=False,
                        case_sensitive_ext=True,
                ):
                    if ext == ".txt":
                        if fname[:len(base_filename) +
                                 1] == base_filename + "_":
                            name = os.path.join(os.path.dirname(base), fname)
                            output_list[i] = name
                            break
                    else:
                        if fname[:len(base_filename)] == base_filename:
                            name = os.path.join(os.path.dirname(base), fname)
                            output_list[i] = name
                            break
            spatial_name, stim_name = output_list

            base_sig_name = filename + lfp_extension
            signal_names = []
            for c in channels:
                if c != 1:
                    if os.path.exists(base_sig_name + str(c)):
                        signal_names.append(base_sig_name + str(c))
                    else:
                        e_msg = "{} does not exist".format(base_sig_name +
                                                           str(c))
                        if error_on_missing:
                            raise ValueError(e_msg)
                        else:
                            logging.warning(e_msg)
                            return None, base
                else:
                    if os.path.exists(base_sig_name):
                        signal_names.append(base_sig_name)
                    else:
                        e_msg = "{} does not exist".format(base_sig_name)
                        if error_on_missing:
                            raise ValueError(e_msg)
                        else:
                            logging.warning(e_msg)
                            return None, base

            file_locs = {
                "Spike": spike_names_all,
                "Clusters": cluster_names_all,
                "Spatial": spatial_name,
                "Signal": signal_names,
                "Stimulation": stim_name,
            }
            return file_locs, base
        else:
            raise ValueError(
                "auto_fname_extraction only implemented for Axona")

    def index_files(self, folder, **kwargs):
        """Find all available neurochat files in the given folder"""
        if self.load_params["system"] == "Axona":
            set_files = []
            root_folders = []
            times = []
            durations = []
            print("Finding all .set files...")
            files = get_all_files_in_dir(
                folder,
                ext=".set",
                recursive=True,
                return_absolute=True,
                case_sensitive_ext=True,
            )
            print(f"Found {len(set_files)} set files")

            for fname in tqdm(files, desc="Processing files"):
                set_files.append(os.path.basename(fname))
                root_folders.append(os.path.normpath(os.path.dirname(fname)))
                with open(fname) as f:
                    f.readline()
                    t = f.readline()[-9:-2]
                    try:
                        int(t[:2])
                        times.append(t)
                        f.readline()
                        f.readline()
                        durations.append(f.readline()[-11:-8])
                    except Exception:
                        if len(times) != len(set_files):
                            times.append(np.nan)
                        if len(durations) != len(set_files):
                            durations.append(np.nan)

            headers = ["filename", "folder", "time", "duration"]
            in_list = [set_files, root_folders, times, durations]
            results_df = list_to_df(in_list, transpose=True, headers=headers)
            return results_df
        else:
            raise ValueError(
                "index_files only implemented for Axona")
Example #9
def downsample_place(self, ftimes, other_spatial, other_ftimes, **kwargs):
    """
    Calculates the two-dimensional firing rate of the unit with respect to
    the location of the animal in the environment. This is called the firing map.

    Specificity indices are measured to assess the quality of
    location-specific firing of the unit.

    This method also plots the spike events superimposed on the trace of
    the animal in the arena, commonly known as a spike plot.

    Parameters
    ----------
    ftimes : ndarray
        Timestamps of the spiking activity of a unit
    other_spatial : NSpatial
        The spatial data to downsample to.
    other_ftimes : list or ndarray
        The firing times of the cell in other_spatial.
    **kwargs
        Keyword arguments

    Returns
    -------
    dict
        Graphical data of the analysis
    """

    _results = oDict()
    graph_data = {}
    update = kwargs.get("update", True)
    pixel = kwargs.get("pixel", 3)
    chop_bound = kwargs.get("chop_bound", 5)
    filttype, filtsize = kwargs.get("filter", ["b", 5])
    lim = kwargs.get("range", [0, self.get_duration()])
    brAdjust = kwargs.get("brAdjust", True)
    thresh = kwargs.get("fieldThresh", 0.2)
    required_neighbours = kwargs.get("minPlaceFieldNeighbours", 9)
    smooth_place = kwargs.get("smoothPlace", False)
    # Can pass another NData object to estimate the border from
    # Can be useful in some cases, such as when the animal
    # only explores a subset of the arena.
    separate_border_data = kwargs.get("separateBorderData", None)

    # xedges = np.arange(0, np.ceil(np.max(self._pos_x)), pixel)
    # yedges = np.arange(0, np.ceil(np.max(self._pos_y)), pixel)

    # Update the border to match the requested pixel size
    if separate_border_data is not None:
        self.set_border(separate_border_data.calc_border(**kwargs))
        times = self._time
        lower, upper = (times.min(), times.max())
        new_times = separate_border_data._time
        sample_spatial_idx = ((new_times <= upper) &
                              (new_times >= lower)).nonzero()
        self._border_dist = self._border_dist[sample_spatial_idx]
    else:
        self.set_border(self.calc_border(**kwargs))

    xedges = self._xbound
    yedges = self._ybound
    xedges2 = other_spatial._xbound
    yedges2 = other_spatial._ybound

    spikeLoc = self.get_event_loc(ftimes, **kwargs)[1]
    posX = self._pos_x[np.logical_and(self.get_time() >= lim[0],
                                      self.get_time() <= lim[1])]
    posY = self._pos_y[np.logical_and(self.get_time() >= lim[0],
                                      self.get_time() <= lim[1])]

    new_set, spike_count = self.bin_downsample(
        ftimes,
        other_spatial,
        other_ftimes,
        final_bins=[
            np.append(yedges, yedges[-1] + np.mean(np.diff(yedges))),
            np.append(xedges, xedges[-1] + np.mean(np.diff(xedges))),
        ],
        sample_bin_amt=[len(xedges2) + 1, len(yedges2) + 1],
    )
    posY = new_set[:, 1]
    posX = new_set[:, 0]

    tmap, yedges, xedges = histogram2d(posY, posX, yedges, xedges)
    if tmap.shape != spike_count.shape:
        print(tmap.shape)
        print(spike_count.shape)
        raise ValueError("Time map does not match firing map")

    tmap /= self.get_sampling_rate()

    ybin, xbin = tmap.shape
    xedges = np.arange(xbin) * pixel
    yedges = np.arange(ybin) * pixel

    fmap = np.divide(spike_count,
                     tmap,
                     out=np.zeros_like(spike_count),
                     where=tmap != 0)
    if fmap.max() == 0:
        print("No firing information!")
        return -1
    if brAdjust:
        nfmap = fmap / fmap.max()
        if (np.sum(np.logical_and(nfmap >= 0.2, tmap != 0)) >=
                0.8 * nfmap[tmap != 0].flatten().shape[0]):
            back_rate = np.mean(fmap[np.logical_and(nfmap >= 0.2,
                                                    nfmap < 0.4)])
            fmap -= back_rate
            fmap[fmap < 0] = 0

    if filttype is not None:
        smoothMap = smooth_2d(fmap, filttype, filtsize)
    else:
        smoothMap = fmap

    if smooth_place:
        pmap = smoothMap
    else:
        pmap = fmap

    pmap[tmap == 0] = None
    pfield, largest_group = NSpatial.place_field(pmap, thresh,
                                                 required_neighbours)
    # if largest_group == 0:
    #     if smooth_place:
    #         info = "where the place field was calculated from smoothed data"
    #     else:
    #         info = "where the place field was calculated from raw data"
    #     logging.info(
    #         "Lack of high firing neighbours to identify place field " +
    #         info)
    centroid = NSpatial.place_field_centroid(pfield, pmap, largest_group)
    # centroid is currently in co-ordinates, convert to pixels
    centroid = centroid * pixel + (pixel * 0.5)
    # flip x and y
    centroid = centroid[::-1]

    p_shape = pfield.shape
    maxes = [xedges.max(), yedges.max()]
    scales = (maxes[0] / p_shape[1], maxes[1] / p_shape[0])
    co_ords = np.array(np.where(pfield == largest_group))
    boundary = [[None, None], [None, None]]
    for i in range(2):
        j = (i + 1) % 2
        boundary[i] = (
            co_ords[j].min() * scales[i],
            np.clip((co_ords[j].max() + 1) * scales[i], 0, maxes[i]),
        )
    inside_x = (boundary[0][0] <= spikeLoc[0]) & (spikeLoc[0] <=
                                                  boundary[0][1])
    inside_y = (boundary[1][0] <= spikeLoc[1]) & (spikeLoc[1] <=
                                                  boundary[1][1])
    co_ords = np.nonzero(np.logical_and(inside_x, inside_y))

    if update:
        _results["Spatial Skaggs"] = self.skaggs_info(fmap, tmap)
        _results["Spatial Sparsity"] = self.spatial_sparsity(fmap, tmap)
        _results["Spatial Coherence"] = np.corrcoef(
            fmap[tmap != 0].flatten(), smoothMap[tmap != 0].flatten())[0, 1]
        _results["Found strong place field"] = largest_group != 0
        _results["Place field Centroid x"] = centroid[0]
        _results["Place field Centroid y"] = centroid[1]
        _results["Place field Boundary x"] = boundary[0]
        _results["Place field Boundary y"] = boundary[1]
        _results["Number of Spikes in Place Field"] = co_ords[0].size
        _results["Percentage of Spikes in Place Field"] = (co_ords[0].size *
                                                           100 / ftimes.size)
        self.update_result(_results)

    smoothMap[tmap == 0] = None

    graph_data["posX"] = posX
    graph_data["posY"] = posY
    graph_data["fmap"] = fmap
    graph_data["smoothMap"] = smoothMap
    graph_data["firingMap"] = fmap
    graph_data["tmap"] = tmap
    graph_data["xedges"] = xedges
    graph_data["yedges"] = yedges
    graph_data["spikeLoc"] = spikeLoc
    graph_data["placeField"] = pfield
    graph_data["largestPlaceGroup"] = largest_group
    graph_data["placeBoundary"] = boundary
    graph_data["indicesInPlaceField"] = co_ords
    graph_data["centroid"] = centroid

    return graph_data