Example #1
 def scale(self, peaks: Peaks, time: float, corresponding_events: List[CorpusEvent],
           corresponding_transforms: List[AbstractTransform], corpus: Corpus = None, **kwargs) -> Peaks:
     event_indices: np.ndarray = np.array([e.state_index for e in corresponding_events], dtype=int)
     # Accumulate a boolean mask marking every peak whose corresponding event index is taboo
     matching_indices: np.ndarray = np.zeros(len(corresponding_events), dtype=bool)
     for taboo_index in self._taboo_indices:
         matching_indices += event_indices == taboo_index
     # Zero out the taboo peaks; all other peaks are left untouched
     peaks.scale(0, matching_indices)
     return peaks
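The loop above is a per-index logical OR. Assuming _taboo_indices is an iterable of ints, np.isin builds the same mask in a single vectorized call; a minimal standalone sketch with made-up indices:

    import numpy as np

    event_indices = np.array([3, 7, 12, 7])   # made-up event indices
    taboo_indices = [7, 12]                   # made-up taboo list
    matching = np.isin(event_indices, taboo_indices)
    print(matching)                           # [False  True  True  True]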
Example #2
 def scale(self, peaks: Peaks, time: float, corresponding_events: List[CorpusEvent],
           corresponding_transforms: List[AbstractTransform], corpus: Corpus = None, **kwargs) -> Peaks:
     events_band_distribution: np.ndarray = np.array([event.get_feature(OctaveBands).value()
                                                      for event in corresponding_events])
     # Inverse euclidean distance between each event's band distribution and the target:
     # closer matches get larger factors. The epsilon guards against division by zero
     # when an event's distribution matches the target exactly.
     distances: np.ndarray = np.sqrt(np.sum(np.power(events_band_distribution - self.band_distribution, 2), axis=1))
     factor: np.ndarray = 1 / np.maximum(distances, 1e-9)
     print(np.min(factor), np.max(factor), factor.shape)  # TODO: THIS SHOULD BE HERE UNTIL PROPERLY DEBUGGED
     peaks.scale(factor)
     return peaks
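The factor is the inverse Euclidean distance between each event's octave-band distribution and the target distribution, so spectrally similar events are boosted most. A standalone numeric sketch with made-up distributions:

    import numpy as np

    target = np.array([0.5, 0.3, 0.2])                      # made-up target band distribution
    events = np.array([[0.5, 0.3, 0.2], [0.1, 0.1, 0.8]])   # one exact match, one distant event
    distances = np.sqrt(np.sum((events - target) ** 2, axis=1))
    factor = 1 / np.maximum(distances, 1e-9)                # epsilon as in the guard above
    print(factor)   # the exact match saturates at 1e9; the distant event gets ~1.34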
Example #3
 def scale(self, peaks: Peaks, time: float, corresponding_events: List[CorpusEvent],
           _corresponding_transforms: List[AbstractTransform], corpus: Corpus = None, **_kwargs) -> Peaks:
     if self._previous_output_index is None:
         return peaks
     else:
         event_indices: np.ndarray = np.array([e.state_index for e in corresponding_events], dtype=int)
         is_matching: np.ndarray = event_indices == self._previous_output_index + 1
         peaks.scale(self.factor, is_matching)
         return peaks
Example #4
 def scale(self, peaks: Peaks, _time: float, corresponding_events: List[CorpusEvent],
           corresponding_transforms: List[AbstractTransform], corpus: Corpus = None, **_kwargs) -> Peaks:
     # TODO: This could be optimized and stored if ScaleAction had direct access to Corpus
     low_index: int = int(self._low_thresh.value * corpus.length())
     high_index: int = int(self._high_thresh.value * corpus.length())
     corresponding_indices: np.ndarray = np.array([e.state_index for e in corresponding_events], dtype=int)
     mask: np.ndarray = ((low_index <= corresponding_indices) & (corresponding_indices <= high_index)).astype(int)
     peaks.scale(mask)
     return peaks
Example #5
 def scale(self, peaks: Peaks, time: float, corresponding_events: List[CorpusEvent],
           corresponding_transforms: List[AbstractTransform], corpus: Corpus = None, **kwargs) -> Peaks:
     if self._previous_transform is None or self._transform_handler is None:
         return peaks
     else:
         peak_transform_ids: np.ndarray = np.array(peaks.transform_ids)
         previous_transform_id: int = self._transform_handler.get_id(self._previous_transform)
         not_matching: np.ndarray = peak_transform_ids != previous_transform_id
         peaks.scale(self.factor, not_matching)
         return peaks
Example #6
 def insert(self, influences: List[PeakEvent]) -> None:
     self._peaks = Peaks(
         scores=np.array(
             [self.default_score.value for _ in range(len(influences))],
             dtype=np.float64),
         times=np.array([influence.event.onset for influence in influences],
                        dtype=np.float64),
         transform_hashes=np.array(
             [influence.transform_hash for influence in influences],
             dtype=np.int32))
Example #7
class DecayActivityPattern(AbstractActivityPattern):
    """Decay: score = exp(-(Δt)/tau), where Δt is the time since creation in beats"""

    DEFAULT_T = 4.6

    def __init__(self,
                 corpus: Corpus = None,
                 tau_mem_decay: float = DEFAULT_T):
        super().__init__(corpus)
        self.logger.debug("[__init__]: ClassicActivityPattern initialized.")
        self.extinction_threshold: Parameter = Parameter(
            0.1, 0.0, None, 'float', "Score below which peaks are removed")
        # TODO: tau shouldn't be the parameter: t should
        self.tau_mem_decay: Parameter = ParamWithSetter(
            self._calc_tau(tau_mem_decay), 0, None, "float",
            "Number of updates until peak is decayed below threshold.",
            self._set_tau)
        self.default_score: Parameter = Parameter(
            1.0, None, None, 'float', "Value of a new peaks upon creation.")
        self._peaks: Peaks = Peaks.create_empty()
        self.last_update_time: float = 0.0
        self._parse_parameters()

    def insert(self, influences: List[PeakEvent]) -> None:
        self._peaks = Peaks(
            scores=np.array(
                [self.default_score.value for _ in range(len(influences))],
                dtype=np.float64),
            times=np.array([influence.event.onset for influence in influences],
                           dtype=np.float64),
            transform_hashes=np.array(
                [influence.transform_hash for influence in influences],
                dtype=np.int32))

    def update_peaks_on_influence(self, new_time: float) -> None:
        self.last_update_time = new_time

    def update_peaks_on_new_event(self, new_time: float) -> None:
        self._peaks.scores *= np.exp(-np.divide(
            new_time - self.last_update_time, self.tau_mem_decay.value))
        self.last_update_time = new_time
        indices_to_remove: np.ndarray = np.where(
            self._peaks.scores <= self.extinction_threshold.value)[0]
        self._peaks.remove(indices_to_remove)

    def clear(self) -> None:
        self._peaks = Peaks.create_empty()
        self.last_update_time = 0.0

    def _set_tau(self, t: float):
        self.tau_mem_decay.value = self._calc_tau(t)

    def _calc_tau(self, t: float):
        """ Converts a decay time t (in beats) into the time constant tau such that a peak
            decays below the extinction threshold after t beats. The -0.001 offset keeps
            the decayed score strictly below the threshold. """
        return -np.divide(t, np.log(self.extinction_threshold.value - 0.001))
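A quick numeric check of the decay math, as a standalone sketch (the 4.6 default and the 0.1 threshold are the class's own values):

    import numpy as np

    threshold = 0.1   # extinction_threshold default
    t = 4.6           # DEFAULT_T: decay time in beats
    tau = -t / np.log(threshold - 0.001)   # same formula as _calc_tau, tau ~= 1.99
    score_after_t = np.exp(-t / tau)       # a peak created at score 1.0, t beats later
    print(score_after_t)                   # ~0.099, just below the threshold, so the peak is removed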
Example #8
    def merge(self,
              peaks: Peaks,
              _time: float,
              corpus: Corpus = None,
              **_kwargs) -> Peaks:
        if peaks.size() <= 1:
            return peaks
        self.logger.debug(
            f"[merge] Merging activity with {peaks.size()} peaks.")

        duration: float = corpus.duration()
        inv_duration: float = 1 / duration
        num_rows: int = int(duration / self._t_width.value)

        peaks_list: List[Peaks] = []
        for transform_hash in np.unique(peaks.transform_ids):
            indices: np.ndarray = np.argwhere(
                peaks.transform_ids == transform_hash)
            indices = indices.reshape((indices.size, ))
            scores: np.ndarray = peaks.scores[indices]
            times: np.ndarray = peaks.times[indices]
            num_cols: int = scores.size

            row_indices: np.ndarray = np.floor(times * inv_duration *
                                               num_rows).astype(np.int32)
            interp_matrix: sparse.coo_matrix = sparse.coo_matrix(
                (np.ones(num_cols), (row_indices, np.arange(num_cols))),
                shape=(num_rows + 1, num_cols))
            interp_matrix: sparse.csc_matrix = interp_matrix.tocsc()

            interpolated_scores: np.ndarray = interp_matrix.dot(scores)
            interpolated_times: np.ndarray = interp_matrix.dot(times)
            num_peaks_per_index: np.ndarray = np.array(
                interp_matrix.sum(axis=1)).reshape(interp_matrix.shape[0])
            peak_indices: np.ndarray = interpolated_scores.nonzero()[0]

            scores: np.ndarray = interpolated_scores[peak_indices]
            times: np.ndarray = np.divide(interpolated_times[peak_indices],
                                          num_peaks_per_index[peak_indices])
            transforms: np.ndarray = np.ones(peak_indices.size,
                                             dtype=np.int32) * transform_hash
            # print("After merge:", scores.shape, times.shape, transforms.shape)

            peaks_list.append(Peaks(scores, times, transforms))

        merged_peaks: Peaks = Peaks.concatenate(peaks_list)
        self.logger.debug(
            f"[merge] Merge successful. Number of peaks after merge: {merged_peaks.size()}."
        )
        return merged_peaks
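The interpolation matrix is a one-hot binning operator: entry (r, c) is 1 when peak c falls into time bin r, so a dot product sums scores (or times) per bin. A minimal standalone sketch of that idea, with made-up peak data:

    import numpy as np
    from scipy import sparse

    times = np.array([0.1, 0.15, 2.3, 2.4])   # made-up peak times in beats
    scores = np.array([0.5, 0.25, 1.0, 0.5])
    num_rows, duration = 4, 4.0               # four bins spanning four beats
    rows = np.floor(times / duration * num_rows).astype(np.int32)
    binning = sparse.coo_matrix((np.ones(times.size), (rows, np.arange(times.size))),
                                shape=(num_rows + 1, times.size)).tocsc()
    print(binning.dot(scores))   # summed score per bin: [0.75 0.   1.5  0.   0.  ]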
Example #9
 def __init__(self, corpus: Optional[Corpus] = None):
     super().__init__(corpus=corpus)
     self.logger.debug("[__init__]: ManualActivityPattern initialized.")
     self.default_score: Parameter = Parameter(
         1.0, None, None, 'float', "Value of a new peak upon creation.")
     self._peaks: Peaks = Peaks.create_empty()
     self._parse_parameters()
Example #10
    def __init__(
        self,
        name: str,
        peak_selector: AbstractPeakSelector = AbstractPeakSelector.default(),
        merge_action: AbstractMergeAction = AbstractMergeAction.default(),
        corpus: Optional[Corpus] = None,
        scale_actions: List[AbstractScaleAction] = AbstractScaleAction.default_set()):
        super().__init__()
        self.logger = logging.getLogger(__name__)
        self.name: str = name
        self._transform_handler: TransformHandler = TransformHandler()
        self.peak_selector: AbstractPeakSelector = peak_selector
        self.corpus: Optional[Corpus] = corpus
        self.scale_actions: Dict[Type[AbstractScaleAction],
                                 AbstractScaleAction] = {}
        self.merge_action: AbstractMergeAction = merge_action

        self.atoms: Dict[str, Atom] = {}

        for scale_action in scale_actions:
            self.add_scale_action(scale_action)

        self.previous_peaks: Peaks = Peaks.create_empty()

        self._force_jump_index: Optional[int] = None

        self.enabled: Parameter = Parameter(default_value=True,
                                            min=False,
                                            max=True,
                                            type_str="bool",
                                            description="Enables this Player.")

        self._parse_parameters()
Example #11
 def scale(self, peaks: Peaks, time: float, corresponding_events: List[CorpusEvent],
           corresponding_transforms: List[AbstractTransform], corpus: Corpus = None, **kwargs) -> Peaks:
     event_indices: List[int] = [e[0].state_index for e in self._history.get_n_last(self.jump_threshold + 1)]
     if not event_indices:
         return peaks
     previous_index: int = event_indices[0]
     # History is ordered most recent first, so a run of -1 diffs counts consecutive playback
     num_consecutive_events: int = len(list(itertools.takewhile(lambda a: a == -1, np.diff(event_indices))))
     if num_consecutive_events <= self.activation_threshold:
         factor: float = 1.0
     elif num_consecutive_events >= self.jump_threshold:
         factor: float = 0.0
     else:
         factor: float = 0.5 ** (num_consecutive_events - self.activation_threshold)
     corresponding_indices: np.ndarray = np.array([e.state_index for e in corresponding_events], dtype=int)
     is_matching: np.ndarray = corresponding_indices == previous_index + 1
     peaks.scale(factor, is_matching)
     return peaks
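Between the two thresholds, the factor halves for each additional consecutive event, so continuations are damped geometrically before being forbidden outright. A standalone sketch of the schedule with made-up threshold values:

    activation_threshold, jump_threshold = 2, 6   # made-up thresholds
    for n in range(8):
        if n <= activation_threshold:
            factor = 1.0
        elif n >= jump_threshold:
            factor = 0.0
        else:
            factor = 0.5 ** (n - activation_threshold)
        print(n, factor)   # 0-2: 1.0, 3: 0.5, 4: 0.25, 5: 0.125, 6-7: 0.0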
Example #12
 def _decide_default(
         self, peaks: Peaks, corpus: Corpus,
         transform_handler: TransformHandler,
         **kwargs) -> Optional[Tuple[CorpusEvent, AbstractTransform]]:
     if peaks.is_empty():
         return None
     max_peak_value: float = np.max(peaks.scores)
     if max_peak_value < self.threshold:
         return None
     else:
         return super()._decide_default(peaks, corpus, transform_handler)
Example #13
    def clear(self):
        """ Reset runtime state without modifying any parameters or settings """
        self.previous_peaks = Peaks.create_empty()
        self.peak_selector.clear()
        for scale_action in self.scale_actions.values():
            scale_action.clear()

        for atom in self.atoms.values():
            atom.clear()

        self._transform_handler.clear()
Example #14
 def __init__(self, corpus: Corpus = None):
     super().__init__(corpus)
     self.logger.debug("[__init__]: ManualActivityPattern initialized.")
     self.extinction_threshold: Parameter = Parameter(
         0.1, 0.0, None, 'float', "Score below which peaks are removed")
     self.tau_mem_decay: Parameter = ParamWithSetter(
         self._calc_tau(self.DEFAULT_N), 1, None, "int",
         "Number of updates until peak is decayed below threshold.",
         self._set_tau)
     self.default_score: Parameter = Parameter(
         1.0, None, None, 'float', "Value of a new peak upon creation.")
     self._peaks: Peaks = Peaks.create_empty()
     self.last_update_time: float = 0.0
     self._event_indices: np.ndarray = np.zeros(0, dtype=np.int32)
     self._parse_parameters()
Example #15
 def _decide_default(
         self, peaks: Peaks, corpus: Corpus,
         transform_handler: TransformHandler,
         **kwargs) -> Optional[Tuple[CorpusEvent, AbstractTransform]]:
     if peaks.is_empty():
         return None
     score_cumsum: np.ndarray = np.cumsum(peaks.scores)
     # Slight offset to avoid an extremely rare case of a floating-point rounding error
     max_value: float = score_cumsum[-1] - 1e-5
     output_target_score: float = float(np.random.random(1) * max_value)
     peak_idx: int = int(np.argwhere(score_cumsum > output_target_score)[0])
     transform_hash: int = int(peaks.transform_ids[peak_idx])
     return (corpus.event_around(peaks.times[peak_idx]),
             transform_handler.get_transform(transform_hash))
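This is roulette-wheel sampling: each peak is drawn with probability proportional to its score. A standalone sketch with made-up scores; np.searchsorted with side='right' is equivalent to the argwhere lookup above:

    import numpy as np

    scores = np.array([0.1, 0.3, 0.6])   # made-up peak scores
    cumsum = np.cumsum(scores)           # [0.1, 0.4, 1.0]
    target = float(np.random.random(1) * (cumsum[-1] - 1e-5))
    idx = int(np.searchsorted(cumsum, target, side='right'))
    print(idx)   # 2 about 60% of the time, since peak 2 holds 60% of the total score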
Example #16
 def _scale_peaks(self, peaks: Peaks, scheduler_time: float, corpus: Corpus,
                  **kwargs) -> Peaks:
     if peaks.is_empty():
         return peaks
     corresponding_events: List[CorpusEvent] = corpus.events_around(
         peaks.times)
     corresponding_transforms: List[AbstractTransform] = [
         self._transform_handler.get_transform(t)
         for t in np.unique(peaks.transform_ids)
     ]
     for scale_action in self.scale_actions.values():
         if scale_action.is_enabled_and_eligible():
             peaks = scale_action.scale(peaks, scheduler_time,
                                        corresponding_events,
                                        corresponding_transforms, corpus,
                                        **kwargs)
     return peaks
Example #17
 def _decide_default(
         self, peaks: Peaks, corpus: Corpus,
         transform_handler: TransformHandler,
         **kwargs) -> Optional[Tuple[CorpusEvent, AbstractTransform]]:
     self.logger.debug("[decide] _decide_default called.")
     if peaks.is_empty():
         return None
     max_peak_value: float = np.max(peaks.scores)
     self.logger.debug(
         f"[decide_default] Max peak value is {max_peak_value}.")
     max_peaks_idx: np.ndarray = np.argwhere(
         np.abs(peaks.scores - max_peak_value) < 0.001).flatten()
     peak_idx: int = int(random.choice(max_peaks_idx))
     transform_hash: int = int(peaks.transform_ids[peak_idx])
     return (corpus.event_around(peaks.times[peak_idx]),
             transform_handler.get_transform(transform_hash))
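The 0.001 tolerance means that ties at the maximum are broken uniformly at random instead of always returning the first peak. A standalone sketch with made-up scores:

    import random
    import numpy as np

    scores = np.array([0.2, 0.9, 0.9, 0.5])   # two peaks tie at the maximum (made up)
    max_idx = np.argwhere(np.abs(scores - np.max(scores)) < 0.001).flatten()
    print(int(random.choice(max_idx)))        # 1 or 2, each with probability 0.5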
Example #18
 def __init__(self,
              corpus: Corpus = None,
              tau_mem_decay: float = DEFAULT_T):
     super().__init__(corpus)
     self.logger.debug("[__init__]: ClassicActivityPattern initialized.")
     self.extinction_threshold: Parameter = Parameter(
         0.1, 0.0, None, 'float', "Score below which peaks are removed")
     # TODO: tau shouldn't be the parameter: t should
     self.tau_mem_decay: Parameter = ParamWithSetter(
         self._calc_tau(tau_mem_decay), 0, None, "float",
         "Number of updates until peak is decayed below threshold.",
         self._set_tau)
     self.default_score: Parameter = Parameter(
         1.0, None, None, 'float', "Value of a new peaks upon creation.")
     self._peaks: Peaks = Peaks.create_empty()
     self.last_update_time: float = 0.0
     self._parse_parameters()
Example #19
    def _merged_peaks(self, time: float, corpus: Corpus, **kwargs) -> Peaks:
        weight_sum: float = 0.0
        for atom in self.atoms.values():
            weight_sum += atom.weight if atom.is_enabled_and_eligible() else 0.0
        if weight_sum < 1e-6:
            self.logger.warning("Weights are invalid. Setting weight sum to 1.0.")
            weight_sum = 1.0

        peaks_list: List[Peaks] = []
        for atom in self.atoms.values():
            if atom.is_enabled_and_eligible():
                peaks: Peaks = atom.pop_peaks()
                peaks.scale(atom.weight / weight_sum)
                peaks_list.append(peaks)

        all_peaks: Peaks = Peaks.concatenate(peaks_list)
        return self.merge_action.merge(all_peaks, time, corpus, **kwargs)
Example #20
 def clear(self) -> None:
     self._peaks = Peaks.create_empty()
     self._event_indices = np.zeros(0, dtype=np.int32)
     self.last_update_time = 0.0
Example #21
 def clear(self) -> None:
     self._peaks = Peaks.create_empty()
Example #22
 def pop_peaks(self) -> Peaks:
     return_peaks: Peaks = self._peaks
     self._peaks = Peaks.create_empty()
     return return_peaks
Example #23
 def pop_peaks(self) -> Peaks:
     """ Returns a shallow copy the activity pattern's peaks (copy of scores but references to times and hashes)"""
     return Peaks.optimized_copy(self._peaks)
Example #24
 def clear(self) -> None:
     self._peaks = Peaks.create_empty()
     self.last_update_time = 0.0
Example #25
    def pop_peaks(self) -> Peaks:
        """ Returns a shallow copy the activity pattern's peaks (copy of scores but references to times and hashes)
            Note: For certain activity patterns, may have side effects such as removing the peaks from the memory,
                  do not use outside main runtime architecture. """

        return Peaks.optimized_copy(self._peaks)
Example #26
 def __init__(self, corpus: Optional[Corpus] = None):
     super().__init__()
     self.logger = logging.getLogger(__name__)
     self._peaks: Peaks = Peaks.create_empty()
     self.corpus: Optional[Corpus] = corpus