Example #1
def correlate_data(data: np.ndarray, prev_buffer: np.ndarray,
                   radius: Optional[int]) -> CorrelationResult:
    """
    The sign convention here is easy to get backwards.

    If `index` is to the left of the optimal trigger point, the waveform
    within `data` will appear shifted to the right,
    and we need to add a positive amount to `index` (`index += positive`).
    - The waveform's peak will appear near the right edge of `data`.

    Either we must slide prev_buffer to the right,
    or we must slide data to the left (by sliding index to the right):
    - correlate(data, prev_buffer)
    - trigger = index + peak_offset
    """
    N = len(data)
    corr = signal.correlate(data,
                            prev_buffer)  # returns double, not single/FLOAT
    Ncorr = 2 * N - 1
    assert len(corr) == Ncorr

    # Find optimal offset
    mid = N - 1

    if radius is not None:
        left = max(mid - radius, 0)
        right = min(mid + radius + 1, Ncorr)

        corr = corr[left:right]
        mid = mid - left

    # argmax(corr) == mid + peak_offset == (data >> peak_offset)
    # peak_offset == argmax(corr) - mid
    peak_offset = np.argmax(corr) - mid  # type: int
    return CorrelationResult(peak_offset, corr)
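
As a sanity check of the sign convention described in the docstring, here is a standalone sketch (not part of the library; it assumes `correlate_data` above is in scope and that `CorrelationResult` exposes `.peak_offset`). It places a pulse 5 samples further right in `data` than in `prev_buffer` and confirms the offset comes out positive:

import numpy as np

N = 64
prev_buffer = np.zeros(N)
prev_buffer[N // 2] = 1.0        # reference pulse at the window center

data = np.zeros(N)
data[N // 2 + 5] = 1.0           # same pulse, 5 samples further right

result = correlate_data(data, prev_buffer, radius=None)
print(result.peak_offset)        # 5, so `trigger = index + 5` slides the window right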
Example #2
    def get_trigger(self, index: int, cache: "PerFrameCache") -> int:
        N = self._buffer_nsamp

        # Get data
        data = self._wave.get_around(index, N, self._stride)
        data -= cache.mean
        normalize_buffer(data)
        data *= self._data_window

        # Require `cache.period`, which an upstream CorrelationTrigger must have set
        if cache.period is None:
            raise CorrError(
                "Missing 'cache.period', try stacking CorrelationTrigger "
                "before LocalPostTrigger")

        # To avoid sign errors, see comment in CorrelationTrigger.get_trigger().
        corr = signal.correlate(data, self._windowed_step)
        assert len(corr) == 2 * N - 1
        mid = N - 1

        # If we're near a falling edge, don't try to make drastic changes.
        if corr[mid] < 0:
            # Give up early.
            return index

        # Don't punish negative results too much.
        # (probably useless. if corr[mid] >= 0,
        # all other negative entries will never be optimal.)
        # np.abs(corr, out=corr)

        # Subtract cost function
        cost = self._cost_norm / cache.period
        corr -= cost

        # Find optimal offset (within ±N/4)
        radius = round(N / 4)

        left = mid - radius
        right = mid + radius + 1

        corr = corr[left:right]
        mid = mid - left

        peak_offset = np.argmax(corr) - mid  # type: int
        trigger = index + (self._stride * peak_offset)

        return trigger
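
The slice-then-argmax pattern above (and again in Example #6) can be factored into a small helper. A minimal sketch, assuming `corr` is a full-mode correlation whose zero-lag entry sits at index `mid`; the helper name is hypothetical, not part of the library:

import numpy as np

def windowed_peak_offset(corr: np.ndarray, mid: int, radius: int) -> int:
    """Search only lags within ±radius of `mid`, and return the signed offset
    argmax(corr) - mid relative to the zero-lag position."""
    left = max(mid - radius, 0)
    right = min(mid + radius + 1, len(corr))
    return int(np.argmax(corr[left:right])) + left - mid

# e.g. peak_offset = windowed_peak_offset(corr, N - 1, round(N / 4))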
Example #3
def get_period(data: np.ndarray) -> int:
    """
    Use autocorrelation to estimate the period of a signal.
    Loosely inspired by https://github.com/endolith/waveform_analysis
    """
    corr = signal.correlate(data, data)
    corr = corr[len(corr) // 2:]

    # Skip past the zero-lag peak: find where the autocorrelation first goes negative
    zero_crossings = np.where(corr < 0)[0]

    if len(zero_crossings) == 0:
        # This can happen given an array of all zeros. Anything else?
        return len(data)

    crossX = zero_crossings[0]
    peakX = crossX + np.argmax(corr[crossX:])
    return int(peakX)
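
A quick way to exercise this estimator is to feed it a sine wave of known period; a minimal usage sketch (assumes `get_period` above is in scope):

import numpy as np

period = 100                               # samples per cycle
t = np.arange(4000)
data = np.sin(2 * np.pi * t / period)

print(get_period(data))                    # prints 100 (or very close to it)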
Example #4
def correlate_spectrum(data: np.ndarray, prev_buffer: np.ndarray,
                       radius: Optional[int]) -> SpectrumResult:
    """
    The sign convention here is easy to get backwards.

    If `index` is to the left of the optimal trigger point, the waveform
    within `data` will appear shifted to the right,
    and we need to add a positive amount to `index` (`index += positive`).
    - The waveform's peak will appear near the right edge of `data`.

    Either we must slide prev_buffer to the right,
    or we must slide data to the left (by sliding index to the right):
    - correlate(data, prev_buffer)
    - trigger = index + peak_offset

    In correlate_spectrum(), I used to use parabolic() on the return value,
    but unfortunately it was unreliable and caused Plok Beach bass to jitter,
    so I turned it off (resulting in the same code as correlate_data).
    """
    N = len(data)
    corr = signal.correlate(data,
                            prev_buffer)  # returns double, not single/f32
    Ncorr = 2 * N - 1
    assert len(corr) == Ncorr

    # Find optimal offset
    mid = N - 1

    if radius is not None:
        left = max(mid - radius, 0)
        right = min(mid + radius + 1, Ncorr)

        corr = corr[left:right]
        mid = mid - left

    # argmax(corr) == mid + peak_offset == (data >> peak_offset)
    # peak_offset == argmax(corr) - mid
    peak_offset = np.argmax(corr) - mid  # type: int
    return SpectrumResult(peak_offset, corr)
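
For reference, the `parabolic()` refinement mentioned above fits a parabola through the peak sample and its two neighbours to obtain a sub-sample peak position. A rough standalone sketch of that idea (not the project's actual implementation), which the function above deliberately no longer applies:

import numpy as np

def parabolic_peak(corr: np.ndarray, x: int) -> float:
    """Refine an integer peak index `x` to a sub-sample position by fitting a
    parabola through corr[x-1], corr[x], corr[x+1]."""
    if x <= 0 or x >= len(corr) - 1:
        return float(x)                     # cannot interpolate at the array edges
    a, b, c = corr[x - 1], corr[x], corr[x + 1]
    denom = a - 2 * b + c
    if denom == 0:
        return float(x)                     # three collinear points: no vertex
    return x + 0.5 * (a - c) / denom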
Example #5
def get_period(
    data: np.ndarray,
    subsmp_s: float,
    max_freq: float,
    self: "Optional[t.CorrelationTrigger]" = None,
) -> int:
    """
    Use tweaked autocorrelation to estimate the period (AKA pitch) of a signal.
    Loosely inspired by https://github.com/endolith/waveform_analysis

    Design principles:
    - It is better to overestimate the period than underestimate.
        - Underestimation leads to bad triggering.
        - Overestimation only leads to slightly increased x-distance.
    - When the wave is exiting the field of view,
        do NOT estimate an egregiously large period.
    - Do not report a tiny period when faced with YM2612 FM feedback/noise.
        - See get_min_period() docstring.

    Return value:
    - Returns 0 if period cannot be estimated.
        This is a good placeholder value
        since it causes buffers/etc. to be basically not updated.
    """
    UNKNOWN_PERIOD = 0

    N = len(data)

    # If the input is essentially silent, the period cannot be estimated.
    if np.max(np.abs(data)) < MIN_AMPLITUDE:
        return UNKNOWN_PERIOD

    # Begin.
    corr_symmetric = signal.correlate(data, data)
    mid = len(corr_symmetric) // 2
    corr = corr_symmetric[mid:]
    assert len(corr) == len(data)

    def get_min_period() -> int:
        """
        Avoid picking periods shorter than `max_freq`.
        - Yamaha FM feedback produces nearly inaudible high frequencies,
          which tend to produce erroneously short period estimates,
          causing correlation to fail.
        - Most music does not go this high.
        - Overestimating period of high notes is mostly harmless.
        """
        max_cyc_s = max_freq
        min_s_cyc = 1 / max_cyc_s
        min_subsmp_cyc = subsmp_s * min_s_cyc
        return iround(min_subsmp_cyc)

    def get_zero_crossing() -> int:
        """Remove the central peak."""
        zero_crossings = np.where(corr < 0)[0]
        if len(zero_crossings) == 0:
            # This can happen given an array of all zeros. Anything else?
            return UNKNOWN_PERIOD
        return zero_crossings[0]

    min_period = get_min_period()
    zero_crossing = get_zero_crossing()
    if zero_crossing == UNKNOWN_PERIOD:
        return UNKNOWN_PERIOD

    # [minX..) = [min_period..) & [zero_crossing..)
    minX = max(min_period, zero_crossing)

    # Find the tallest peak at or beyond minX (past the zero-lag peak).
    def calc_peak():
        return minX + np.argmax(corr[minX:])

    temp_peakX = calc_peak()
    # In the case of uncorrelated noise,
    # corr[temp_peakX] can be tiny (smaller than N * MIN_AMPLITUDE^2).
    # But don't return 0 since it's not silence.

    is_long_period = temp_peakX > 0.1 * N
    if is_long_period:
        # If a long-period wave has strong harmonics,
        # the true peak will be attenuated below the harmonic peaks.
        # Compensate for that.
        divisor = np.linspace(1, 1 - EDGE_COMPENSATION, N, endpoint=False, dtype=FLOAT)
        divisor = np.maximum(divisor, 1 / MAX_AMPLIFICATION)
        corr /= divisor
        peakX = calc_peak()

    else:
        peakX = temp_peakX

    return int(peakX)
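
For concreteness, `get_min_period()` is only a unit conversion from a frequency ceiling to a sample count. With illustrative numbers (not taken from the source):

subsmp_s = 48000      # sampling rate of `data`, in subsamples per second
max_freq = 1000       # Hz; reject any period shorter than one cycle at this frequency

min_period = round(subsmp_s / max_freq)    # 48 subsamples per cycle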
Example #6
    def get_trigger(self, index: int, cache: "PerFrameCache") -> int:
        N = self._buffer_nsamp

        # Get data
        stride = self._stride
        data = self._wave.get_around(index, N, stride)
        cache.mean = np.mean(data)
        data -= cache.mean

        # Window data
        period = get_period(data)
        cache.period = period * stride

        if self._is_window_invalid(period):
            diameter, falloff = [
                round(period * x) for x in self.cfg.trigger_falloff
            ]
            falloff_window = cosine_flat(N, diameter, falloff)
            window = np.minimum(falloff_window, self._data_taper)

            self._prev_period = period
            self._prev_window = window
        else:
            window = self._prev_window

        data *= window

        # prev_buffer
        prev_buffer = self._windowed_step + self._buffer

        # Calculate correlation
        """
        If offset < optimal, we need to `offset += positive`.
        - The peak will appear near the right of `data`.

        Either we must slide prev_buffer to the right:
        - correlate(data, prev_buffer)
        - trigger = offset + peak_offset

        Or we must slide data to the left (by sliding offset to the right):
        - correlate(prev_buffer, data)
        - trigger = offset - peak_offset
        """
        corr = signal.correlate(
            data, prev_buffer)  # returns double, not single/FLOAT
        assert len(corr) == 2 * N - 1

        # Find optimal offset (within trigger_diameter, default=±N/4)
        mid = N - 1
        radius = round(N * self.cfg.trigger_diameter / 2)

        left = mid - radius
        right = mid + radius + 1

        corr = corr[left:right]
        mid = mid - left

        # argmax(corr) == mid + peak_offset == (data >> peak_offset)
        # peak_offset == argmax(corr) - mid
        peak_offset = np.argmax(corr) - mid  # type: int
        trigger = index + (stride * peak_offset)

        # Apply post trigger (before updating correlation buffer)
        if self.post:
            trigger = self.post.get_trigger(trigger, cache)

        # Update correlation buffer (distinct from visible area)
        aligned = self._wave.get_around(trigger, self._buffer_nsamp, stride)
        self._update_buffer(aligned, cache)

        return trigger
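
The argument-order duality described in the block comment can be checked numerically. A tiny standalone sketch: swapping the arguments to `signal.correlate` flips the sign of the recovered offset.

import numpy as np
from scipy import signal

N = 32
prev_buffer = np.zeros(N)
prev_buffer[10] = 1.0
data = np.zeros(N)
data[13] = 1.0                     # feature sits 3 samples further right in `data`

mid = N - 1
fwd = int(np.argmax(signal.correlate(data, prev_buffer))) - mid
rev = int(np.argmax(signal.correlate(prev_buffer, data))) - mid
print(fwd, rev)                    # 3 -3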