Code example #1
 def configure(self, time_series, nfft=None):
     """
     Store the input shape to be later used to estimate memory usage.
     Also create the algorithm instance.
     """
     self.input_shape = time_series.read_data_shape()
     log_debug_array(LOG, time_series, "time_series")
     
     ##-------------------- Fill Algorithm for Analysis -------------------##
     self.algorithm = NodeCoherence()
     if nfft is not None:
         self.algorithm.nfft = nfft
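Example #1 is the legacy form of NodeCoherenceAdapter.configure (the full class appears in example #5): it reads the 4D shape straight from the TimeSeries datatype and builds the NodeCoherence analyzer, overriding nfft only when a value is supplied. A minimal calling sketch, under the assumption that the adapter is instantiated directly and that ts is an already-stored 4D TimeSeries datatype; the nfft value is illustrative:

    adapter = NodeCoherenceAdapter()
    adapter.configure(ts, nfft=256)   # omitting nfft keeps NodeCoherence's default
    print(adapter.input_shape)        # the 4D shape returned by ts.read_data_shape()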
Code example #2
 def configure(self, time_series, nfft=None):
     """
     Store the input shape to be later used to estimate memory usage.
     Also create the algorithm instance.
     """
     self.input_time_series_index = time_series
     self.input_shape = (self.input_time_series_index.data_length_1d,
                         self.input_time_series_index.data_length_2d,
                         self.input_time_series_index.data_length_3d,
                         self.input_time_series_index.data_length_4d)
     LOG.debug("Time series shape is %s" % str(self.input_shape))
     # -------------------- Fill Algorithm for Analysis -------------------##
     self.algorithm = NodeCoherence()
     if nfft is not None:
         self.algorithm.nfft = nfft
Code example #3
 def configure(self, view_model):
     # type: (NodeCoherenceModel) -> None
     """
     Store the input shape to be later used to estimate memory usage.
     Also create the algorithm instance.
     """
     self.input_time_series_index = self.load_entity_by_gid(view_model.time_series.hex)
     self.input_shape = (self.input_time_series_index.data_length_1d,
                         self.input_time_series_index.data_length_2d,
                         self.input_time_series_index.data_length_3d,
                         self.input_time_series_index.data_length_4d)
     self.log.debug("Time series shape is %s" % str(self.input_shape))
     # -------------------- Fill Algorithm for Analysis -------------------##
     self.algorithm = NodeCoherence()
     if view_model.nfft is not None:
         self.algorithm.nfft = view_model.nfft
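Example #3 moves the inputs into a NodeCoherenceModel view model: the time series travels as a GID (here a uuid.UUID, hence the .hex) and is resolved to a TimeSeriesIndex, whose data_length_* columns give the 4D shape without opening the HDF5 file. A hedged sketch of building such a view model; the field names come from the example, while default construction of NodeCoherenceModel and the concrete values are assumptions:

    import uuid

    view_model = NodeCoherenceModel()
    # Example #3 calls view_model.time_series.hex, so the field is assumed to hold a uuid.UUID;
    # example #6 passes the value through unchanged, so a plain GID string may also be accepted there.
    view_model.time_series = uuid.UUID(time_series_index.gid)
    view_model.nfft = 256          # optional; leave it unset to keep the analyzer default
    adapter.configure(view_model)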
Code example #4
 def get_input_tree(self):
     """
     Return a list of lists describing the interface to the analyzer. This
     is used by the GUI to generate the menus and fields necessary for
     defining a simulation.
     """
     algorithm = NodeCoherence()
     algorithm.trait.bound = self.INTERFACE_ATTRIBUTES_ONLY
     tree = algorithm.interface[self.INTERFACE_ATTRIBUTES]
     for node in tree:
         if node['name'] == 'time_series':
             node['conditions'] = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                              operations=["=="], values=[4])
     return tree
Code example #5
class NodeCoherenceAdapter(ABCAsynchronous):
    """ TVB adapter for calling the NodeCoherence algorithm. """
    
    _ui_name = "Cross coherence of nodes"
    _ui_description = "Compute Node Coherence for a TimeSeries input DataType."
    _ui_subsection = "coherence"
    
    
    def get_input_tree(self):
        """
        Return a list of lists describing the interface to the analyzer. This
        is used by the GUI to generate the menus and fields necessary for
        defining a simulation.
        """
        algorithm = NodeCoherence()
        algorithm.trait.bound = self.INTERFACE_ATTRIBUTES_ONLY
        tree = algorithm.interface[self.INTERFACE_ATTRIBUTES]
        for node in tree:
            if node['name'] == 'time_series':
                node['conditions'] = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                                 operations=["=="], values=[4])
        return tree
    
    
    def get_output(self):
        return [CoherenceSpectrum]


    def configure(self, time_series, nfft=None):
        """
        Store the input shape to be later used to estimate memory usage.
        Also create the algorithm instance.
        """
        self.input_shape = time_series.read_data_shape()
        log_debug_array(LOG, time_series, "time_series")
        
        ##-------------------- Fill Algorithm for Analysis -------------------##
        self.algorithm = NodeCoherence()
        if nfft is not None:
            self.algorithm.nfft = nfft


    def get_required_memory_size(self, **kwargs):
        """
        Return the required memory to run this algorithm.
        """
        used_shape = (self.input_shape[0], 1, self.input_shape[2], self.input_shape[3])
        input_size = numpy.prod(used_shape) * 8.0
        output_size = self.algorithm.result_size(used_shape)
        return input_size + output_size    


    def get_required_disk_size(self, **kwargs):
        """
        Returns the required disk size to be able to run the adapter (in kB).
        """
        used_shape = (self.input_shape[0], 1, self.input_shape[2], self.input_shape[3])
        return self.array_size2kb(self.algorithm.result_size(used_shape))


    def launch(self, time_series, nfft=None):
        """ 
        Launch algorithm and build results. 
        """
        ##--------- Prepare a CoherenceSpectrum object for result ------------##
        coherence = CoherenceSpectrum(source=time_series,
                                      nfft=self.algorithm.nfft,
                                      storage_path=self.storage_path)
        
        ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]
        
        ##---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries(use_storage=False)
        small_ts.sample_rate = time_series.sample_rate
        partial_coh = None
        for var in range(self.input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_coh = self.algorithm.evaluate()
            coherence.write_data_slice(partial_coh)
        coherence.frequency = partial_coh.frequency
        coherence.close_file()
        return coherence
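In example #5, get_required_memory_size deliberately drops the second dimension to 1 because launch processes one state variable per iteration, and it counts 8 bytes per float64 sample. A worked sketch of that arithmetic for a hypothetical input shape; the numbers are illustrative, and the analyzer-side result_size term is left out because it depends on NodeCoherence internals:

    import numpy

    input_shape = (4000, 2, 76, 1)   # hypothetical (time, state variables, nodes, modes)
    used_shape = (input_shape[0], 1, input_shape[2], input_shape[3])
    input_size = numpy.prod(used_shape) * 8.0    # float64 bytes for a single state variable
    print(used_shape, input_size)                # (4000, 1, 76, 1) 2432000.0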
Code example #6
class NodeCoherenceAdapter(ABCAdapter):
    """ TVB adapter for calling the NodeCoherence algorithm. """

    _ui_name = "Cross coherence of nodes"
    _ui_description = "Compute Node Coherence for a TimeSeries input DataType."
    _ui_subsection = "coherence"

    def get_form_class(self):
        return NodeCoherenceForm

    def get_output(self):
        return [CoherenceSpectrumIndex]

    def configure(self, view_model):
        # type: (NodeCoherenceModel) -> None
        """
        Store the input shape to be later used to estimate memory usage.
        Also create the algorithm instance.
        """
        self.input_time_series_index = self.load_entity_by_gid(
            view_model.time_series)
        self.input_shape = (self.input_time_series_index.data_length_1d,
                            self.input_time_series_index.data_length_2d,
                            self.input_time_series_index.data_length_3d,
                            self.input_time_series_index.data_length_4d)
        self.log.debug("Time series shape is %s" % str(self.input_shape))
        # -------------------- Fill Algorithm for Analysis -------------------##
        self.algorithm = NodeCoherence()
        if view_model.nfft is not None:
            self.algorithm.nfft = view_model.nfft

    def get_required_memory_size(self, view_model):
        # type: (NodeCoherenceModel) -> int
        """
        Return the required memory to run this algorithm.
        """
        used_shape = (self.input_shape[0], 1, self.input_shape[2],
                      self.input_shape[3])
        input_size = numpy.prod(used_shape) * 8.0
        output_size = self.algorithm.result_size(used_shape)
        return input_size + output_size

    def get_required_disk_size(self, view_model):
        # type: (NodeCoherenceModel) -> int
        """
        Returns the required disk size to be able to run the adapter (in kB).
        """
        used_shape = (self.input_shape[0], 1, self.input_shape[2],
                      self.input_shape[3])
        return self.array_size2kb(self.algorithm.result_size(used_shape))

    def launch(self, view_model):
        # type: (NodeCoherenceModel) -> [CoherenceSpectrumIndex]
        """
        Launch algorithm and build results. 
        """
        # --------- Prepare a CoherenceSpectrum object for result ------------##
        coherence_spectrum_index = CoherenceSpectrumIndex()
        time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)

        dest_path = h5.path_for(self.storage_path, CoherenceSpectrumH5,
                                coherence_spectrum_index.gid)
        coherence_h5 = CoherenceSpectrumH5(dest_path)
        coherence_h5.gid.store(uuid.UUID(coherence_spectrum_index.gid))
        coherence_h5.source.store(view_model.time_series)
        coherence_h5.nfft.store(self.algorithm.nfft)

        # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        input_shape = time_series_h5.data.shape
        node_slice = [
            slice(input_shape[0]), None,
            slice(input_shape[2]),
            slice(input_shape[3])
        ]
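        # Position 1 stays None here; the loop below replaces it with one state variable at a time.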

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        small_ts.sample_period = time_series_h5.sample_period.load()
        small_ts.sample_period_unit = time_series_h5.sample_period_unit.load()
        partial_coh = None
        for var in range(input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_coh = self.algorithm.evaluate()
            coherence_h5.write_data_slice(partial_coh)
        coherence_h5.frequency.store(partial_coh.frequency)
        array_metadata = coherence_h5.array_data.get_cached_metadata()
        freq_metadata = coherence_h5.frequency.get_cached_metadata()
        coherence_h5.close()
        time_series_h5.close()

        coherence_spectrum_index.array_data_min = array_metadata.min
        coherence_spectrum_index.array_data_max = array_metadata.max
        coherence_spectrum_index.array_data_mean = array_metadata.mean
        coherence_spectrum_index.array_has_complex = array_metadata.has_complex
        coherence_spectrum_index.array_is_finite = array_metadata.is_finite
        coherence_spectrum_index.shape = json.dumps(
            coherence_h5.array_data.shape)
        coherence_spectrum_index.ndim = len(coherence_h5.array_data.shape)
        coherence_spectrum_index.fk_source_gid = self.input_time_series_index.gid
        coherence_spectrum_index.nfft = partial_coh.nfft
        coherence_spectrum_index.frequencies_min = freq_metadata.min
        coherence_spectrum_index.frequencies_max = freq_metadata.max
        coherence_spectrum_index.subtype = CoherenceSpectrum.__name__

        return coherence_spectrum_index
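Putting example #6 together, the call order matters: configure must run first so that input_shape, input_time_series_index and the NodeCoherence instance exist before the size estimates and launch are invoked. A minimal end-to-end sketch, assuming the adapter is driven by hand rather than by TVB's operation framework (which normally sets storage_path and performs these calls itself); view_model is a NodeCoherenceModel populated as in the sketch after example #3:

    adapter = NodeCoherenceAdapter()
    adapter.storage_path = "/tmp/coherence_op"       # hypothetical; normally provided by the framework
    adapter.configure(view_model)
    estimated_bytes = adapter.get_required_memory_size(view_model)
    coherence_index = adapter.launch(view_model)     # returns a populated CoherenceSpectrumIndex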
Code example #7
 def get_traited_datatype(self):
     return NodeCoherence()