Example #1
def plot_doa(x_t, title):
    x = psa.Signal(x_t, fs, 'acn', 'sn3d')
    X = psa.Stft.fromSignal(x,
                            window_size=window_size,
                            window_overlap=window_overlap,
                            nfft=nfft)
    X_doa = psa.compute_DOA(X)
    psa.plot_doa(X_doa, title)
    plt.show()
    return X_doa
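
The helper above relies on module-level analysis settings (fs, window_size, window_overlap, nfft) and on the psa package used throughout these examples. A minimal sketch of the setup it assumes, with illustrative values, a toy noise input and an assumed import name for psa:

import numpy as np
import matplotlib.pyplot as plt
import parametric_spatial_audio_processing as psa  # import name assumed

fs = 48000                          # placeholder sampling rate
window_size = 256                   # placeholder STFT parameters
window_overlap = window_size // 2
nfft = window_size

# x_t stands for a first-order ambisonic recording of shape (4, num_samples),
# in ACN ordering with SN3D normalization, as expected by psa.Signal above.
x_t = np.random.randn(4, fs)        # one second of noise, for illustration only
X_doa = plot_doa(x_t, 'toy input')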
Example #2
        window_overlap = window_size//2

        _, _, S_dir = scipy.signal.stft(s_dir, fs, nperseg=window_size, noverlap=window_overlap )


        s_tot_ambi = psa.Signal(bformat, fs, 'acn', 'n3d')
        S_tot_ambi = psa.Stft.fromSignal(s_tot_ambi,
                                window_size=window_size,
                                window_overlap=window_overlap
                                )
        doa = psa.compute_DOA(S_tot_ambi)
        directivity = S_tot_ambi.compute_ita_re(r=r)

        psa.plot_signal(s_tot_ambi)
        psa.plot_magnitude_spectrogram(S_tot_ambi)
        psa.plot_doa(doa)
        psa.plot_directivity(directivity)
        # psa.plot_directivity(directivity.sqrt())

        est_S_dir_ambi = S_tot_ambi.apply_mask(directivity.sqrt())
        est_S_dir = est_S_dir_ambi.data[0]
        # psa.plot_magnitude_spectrogram(est_S_dir_ambi)
        doa_est_S_dir_ambi = psa.compute_DOA(est_S_dir_ambi)
        directivity_est_S_dir_ambi = est_S_dir_ambi.compute_ita_re(r=r)
        psa.plot_doa(doa_est_S_dir_ambi,title='after method')
        # psa.plot_directivity(directivity_est_S_dir_ambi)
        plt.show()

        est_s_dir_ambi = psa.Signal.fromStft(est_S_dir_ambi,
                                       window_size=window_size,
                                       window_overlap=window_overlap)
Example #3
def estimate_doa(data, sr, params):
    """
    Given an input audio signal, get the most significant tf bins per frame
    :param data: np.array (num_frames, num_channels)
    :param sr:  sampling rate
    :param params: params dict
    :return: an array of the form
        [frame, [class_id, azi, ele], [class_id, azi, ele], ...]
        without repeated frame instances, quantized at hop_size,
        containing the DOAs of all valid tf bins.
    """

    ### Preprocess data
    X = preprocess(data, sr, params)
    N = X.get_num_time_bins()
    K = X.get_num_frequency_bins()
    r = params['r']

    ### Diffuseness mask
    doa = psa.compute_DOA(X)
    directivity = X.compute_ita_re(r=r)
    directivity_mask = directivity.compute_mask(th=params['directivity_th'])


    ### Energy density mask
    e = psa.compute_energy_density(X)
    block_size = params['energy_density_local_th_size']
    tl = e.compute_threshold_local(block_size=block_size)
    e_mask = e.compute_mask(tl)
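    # This keeps only tf bins whose energy density lies above a locally computed
    # threshold, so low-energy regions do not contribute DOA estimates later on.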


    ### DOA Variance mask (computed on azimuth variance)
    vicinity_radius = params['doa_std_vicinity_radius']
    if np.size(vicinity_radius) == 1:
        # Square!
        r_k = vicinity_radius
        r_n = vicinity_radius
    elif np.size(vicinity_radius) == 2:
        # Rectangle! [k, n]
        r_k = vicinity_radius[0]
        r_n = vicinity_radius[1]
    else:
        raise ValueError('doa_std_vicinity_radius must have one or two elements')

    # TODO: optimize the for loop
    std = np.zeros((K, N))
    doa0_k_array = []
    for r in range(-r_n,r_n+1):
        doa0_k_array.append(np.roll(doa.data[0,:,:],r))
    doa0_k = np.stack(doa0_k_array, axis=0)

    for k in range(r_k, K - r_k):
        std[k, :] = scipy.stats.circstd(doa0_k[:, k - r_k:k + r_k + 1, :], high=np.pi, low=-np.pi, axis=(0, 1))

    # not optimized version...
    # for k in range(r_k, K-r_k):
    #     for n in range(r_n, N-r_n):
    #         # azi
    #         std[k, n] = scipy.stats.circstd(doa.data[0, k-r_k:k+r_k+1, n-r_n:n+r_n+1], high=np.pi, low=-np.pi)
    #         # ele
    #         # std[k, n] = np.std(doa.data[1, k-r_k:k+r_k+1, n-r_n:n+r_n+1])

    # Edges: largest value
    std_max = np.max(std)
    std[0:r_k, :] = std_max
    std[K-r_k:K, :] = std_max
    std[:, 0:r_n] = std_max
    std[:, N - r_n:N] = std_max
    # Scale values to min/max
    std_scaled = std / std_max
    # Invert values
    std_scaled_inv = 1 - std_scaled
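    # After scaling and inverting, bins with a locally stable azimuth (low circular std)
    # sit close to 1 and pass the threshold below, while unstable bins (and the edge
    # bins forced to std_max above) are discarded.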

    # Compute mask
    doa_std = psa.Stft(doa.t, doa.f, std_scaled_inv, doa.sample_rate)
    doa_std_mask = doa_std.compute_mask(th=params['doa_std_th'])
    mask_all = doa_std_mask.apply_mask(directivity_mask).apply_mask(e_mask)
    doa_th = doa.apply_mask(mask_all)


    ## Median average
    median_averaged_doa = np.empty(doa.data.shape)
    median_averaged_doa.fill(np.nan)
    vicinity_size = (2*r_k-1) + (2*r_n-1)
    doa_median_average_nan_th = params['doa_median_average_nan_th']

    vicinity_radius = params['median_filter_vicinity_radius']
    if np.size(vicinity_radius) == 1:
        # Square!
        r_k = vicinity_radius
        r_n = vicinity_radius
    elif np.size(vicinity_radius) == 2:
        # Rectangle! [k, n]
        r_k = vicinity_radius[0]
        r_n = vicinity_radius[1]
    else:
        raise ValueError('median_filter_vicinity_radius must have one or two elements')

    # TODO: optimize the for loop
    for k in range(r_k, K - r_k):
        for n in range(r_n, N - r_n):
            azis = discard_nans(doa_th.data[0, k - r_k:k + r_k + 1, n - r_n:n + r_n + 1].flatten())
            if azis.size > vicinity_size * doa_median_average_nan_th:
                median_averaged_doa[0, k, n] = circmedian(azis, 'rad')
            eles = discard_nans(doa_th.data[1, k - r_k:k + r_k + 1, n - r_n:n + r_n + 1].flatten())
            if eles.size > vicinity_size * doa_median_average_nan_th:
                median_averaged_doa[1, k, n] = np.median(eles)
    doa_th_median = psa.Stft(doa.t, doa.f, median_averaged_doa, doa.sample_rate)


    ## Plot stuff
    if params['plot']:
        psa.plot_doa(doa, title='doa')
        psa.plot_doa(doa.apply_mask(e_mask), title='e mask')
        psa.plot_doa(doa.apply_mask(directivity_mask), title='directivity mask')
        psa.plot_doa(doa.apply_mask(doa_std_mask), title='doa std mask')
        psa.plot_doa(doa_th, title='doa mask all')
        psa.plot_doa(doa_th_median, title='doa circmedian')
        plt.show()


    ## Fold values into a vector

    # Get a list of bins with the position estimation according to the selected doa_method
    # TODO: OPTIMIZE
    active_windows = []
    position = []
    for n in range(N):
        azi = discard_nans(doa_th_median.data[0, :, n])
        ele = discard_nans(doa_th_median.data[1, :, n])
        if np.size(azi) < params['num_min_valid_bins']:
            # Empty! not enough suitable doa values in this analysis window
            pass
        else:
            active_windows.append(n)
            position.append([rad2deg(azi), rad2deg(ele)])

    # result = [bin, class_id, azi, ele] with likely repeated bin instances
    result = []
    label = params['default_class_id']
    for window_idx, window in enumerate(active_windows):
        num_bins = np.shape(position[window_idx])[1]
        for b in range(num_bins):
            azi = position[window_idx][0][b]
            ele = position[window_idx][1][b]
            result.append([window, label, azi, ele])


    # Perform the window transformation by averaging within frame
    ## TODO: assert our bins are smaller than required ones

    current_window_hop = (params['window_size'] - params['window_overlap']) / float(sr)
    window_factor = params['required_window_hop'] / current_window_hop
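    # Worked example with made-up numbers: window_size=2400, window_overlap=1200 and
    # sr=24000 give current_window_hop = 0.05 s; with required_window_hop = 0.1 s the
    # factor is 2, so pairs of analysis windows collapse into one output frame below.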

    # Since frames are ordered (at least they should be), we can optimize this a little bit
    last_frame = -1
    # result_quantized = [frame, [class_id, azi, ele],[class_id, azi, ele]... ] without repeated bin instances
    result_quantized = []
    for row in result:
        frame = row[0]
        new_frame = int(np.floor(frame / window_factor))
        if new_frame == last_frame:
            result_quantized[-1].append([row[1], row[2], row[3]])
        else:
            result_quantized.append([new_frame, [row[1], row[2], row[3]]])
        last_frame = new_frame

    return result_quantized
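
Below is a hedged sketch of the params dictionary that estimate_doa reads; every key appears in the function body above, but the values are placeholders rather than the ones used in the original project:

params = {
    'r': 1,                              # passed to compute_ita_re (placeholder value)
    'directivity_th': 0.9,               # directivity mask threshold
    'energy_density_local_th_size': 51,  # odd block size for the local energy threshold
    'doa_std_vicinity_radius': 2,        # scalar -> square vicinity, [k, n] -> rectangle
    'doa_std_th': 0.7,                   # threshold on the scaled, inverted azimuth std
    'doa_median_average_nan_th': 0.5,    # minimum fraction of valid bins for the median filter
    'median_filter_vicinity_radius': [2, 2],
    'num_min_valid_bins': 5,             # minimum valid bins per analysis window
    'default_class_id': 0,
    'window_size': 2400,                 # STFT window length, in samples
    'window_overlap': 1200,
    'required_window_hop': 0.1,          # target output hop, in seconds
    'plot': False,
}
# preprocess() may read additional keys that are not shown in this snippet.
# result_quantized = estimate_doa(data, sr, params)   # data: (num_frames, num_channels)
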
Example #4

psa.plot_diffuseness(diffuseness_stft, title='diffuseness')

## Apply diffuseness mask to separate background and foreground

background_stft = stft.apply_mask(diffuseness_stft)
foreground_stft = stft.apply_mask(directivity_stft)
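# Presumably directivity_stft = 1 - diffuseness_stft in the part of this example that
# is not shown, so the two masks select complementary (diffuse vs. directive) bins.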

psa.plot_magnitude_spectrogram(background_stft,
                               title='background magnitude spectrogram')
psa.plot_magnitude_spectrogram(foreground_stft,
                               title='foreground magnitude spectrogram')

## Find DOA on the foreground

doa = psa.compute_DOA(stft)
psa.plot_doa(doa, title='doa')

doa_foreground = psa.compute_DOA(foreground_stft)
psa.plot_doa(doa_foreground, title='doa_foreground')

doa_background = psa.compute_DOA(background_stft)
psa.plot_doa(doa_background, title='doa_background')

## Just get the most directive values

directivity_mask = directivity_stft.compute_mask(th=0.95)
psa.plot_mask(directivity_mask, title='directivity mask')

masked_doa_foreground = doa_foreground.apply_mask(directivity_mask)
psa.plot_doa(masked_doa_foreground, title='masked_doa_foreground')
Example #5

def compute_peak_statistics(ir,
                            sample_rate,
                            ambisonics_ordering,
                            ambisonics_normalization,
                            plot=False,
                            plot_title = ''):


    ## Signal
    signal = psa.Signal(ir, int(sample_rate), ambisonics_ordering, ambisonics_normalization)
    if plot:
        psa.plot_signal(signal,title=plot_title+'IR')

    stft = psa.Stft.fromSignal(signal,
                               window_size=analysis_window_size,
                               window_overlap=window_overlap,
                               nfft=fft_size,
                               )
    stft = stft.limit_bands(fmin=fmin, fmax=fmax)

    if plot:
        psa.plot_magnitude_spectrogram(stft,title=plot_title+'IR Magnitude Spectrogram, w='+str(analysis_window_size))

    ### Energy Density
    energy_density_t = psa.compute_energy_density(signal)
    if plot:
        psa.plot_signal(energy_density_t,'Energy Density', y_scale='log')

    # # Smoothed signal
    # L = gaussian_window_length
    # smooth_window = scipy.signal.general_gaussian(L, p=gaussian_window_shape, sig=gaussian_window_std)
    # smoothed_energy_density_t = scipy.signal.fftconvolve(smooth_window, energy_density_t.data[0, :])
    # smoothed_energy_density_t = (np.average(energy_density_t.data[0, :]) / np.average(smoothed_energy_density_t)) * smoothed_energy_density_t
    # smoothed_energy_density_t = np.roll(smoothed_energy_density_t, -((L - 1) / 2))
    # smoothed_energy_density_t = smoothed_energy_density_t[:-(L - 1)]  # same length
    #
    # ### Peak picking
    #
    # # WAVELET
    # cwt_widths = np.arange(0.5*L,1.5*L) # Find peaks of shape among 2 gaussian window lengths
    # smoothed_peaks = scipy.signal.find_peaks_cwt(smoothed_energy_density_t, widths=cwt_widths)

    # # Fine sample correction of peaks: find local maxima over a gaussian window length
    # corrected_peaks = copy.deepcopy(smoothed_peaks)
    # for peak_idx,peak in enumerate(smoothed_peaks):
    #     local_energy = smoothed_energy_density_t[peak - (L / 2):peak + (L / 2)]
    #     corrected_peaks[peak_idx] = np.argmax(local_energy) + peak - (L / 2)
    #
    # if plot:
    #     plt.figure()
    #     plt.suptitle('Smoothed Energy Density & peaks')
    #     ax = plt.subplot(111)
    #     ax.semilogy(energy_density_t.data[0,:])
    #     ax.semilogy(smoothed_energy_density_t)
    #
    #     # plot peak estimates
    #     for peak in corrected_peaks:
    #         plt.axvline(x=peak, color='g')
    #
    #     # plot time frames
    #     for x in np.arange(0,processing_window_samples,analysis_window_size):
    #         plt.axvline(x=x, color='r', alpha=0.3)
    #
    #     plt.grid()
    #
    # peak_time_bins = []
    # for peak in corrected_peaks:
    #     peak_time_bins.append(find_maximal_time_bin(peak, stft, overlap_factor))


    ## Raw Estimates
    doa = psa.compute_DOA(stft)
    if plot:
        psa.plot_doa(doa,title=plot_title+'DoA estimates, w='+str(analysis_window_size))

    # diffuseness = psa.compute_diffuseness(stft)
    # if plot:
    #     psa.plot_diffuseness(diffuseness,title=plot_title+'Diffuseness, w='+str(analysis_window_size))


    neighborhood_size = 3
    ## DOA variance
    doa_var = copy.deepcopy(doa)
    for n in range(doa.get_num_time_bins()):
        for k in range(doa.get_num_frequency_bins()):
            local_var_azi = 0
            local_var_ele = 0
            local_azi = []
            local_ele = []
            r = int(np.floor(neighborhood_size/2)) # neighborhood radius
            for x in np.arange(n - r, n + r + 1):
                for y in np.arange(k - r, k + r + 1):
                    if x < 0:
                        continue
                    elif x >= doa.get_num_time_bins():
                        continue
                    if y < 0:
                        continue
                    elif y >= doa.get_num_frequency_bins():
                        continue
                    local_azi.append(doa.data[0,y,x])
                    local_ele.append(doa.data[1,y,x])
                    # local_var_azi += np.std(doa.data[0,y,x])
                    # local_var_ele += np.std(doa.data[1,y,x])
            local_var_azi = scipy.stats.circvar(np.array(local_azi), high=np.pi, low=-np.pi)
            local_var_ele = np.var(np.array(local_ele))
            doa_var.data[0,k,n] = local_var_azi
            doa_var.data[1,k,n] = local_var_ele

    ## DOA VAR salience

    neighborhood_size = round_up_to_odd(doa_var.get_num_frequency_bins())
    doa_var_salience = threshold_local(doa_var.data[0,:],block_size=neighborhood_size)
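    # threshold_local is presumably skimage.filters.threshold_local, which requires an
    # odd block_size (hence round_up_to_odd) and returns a per-bin adaptive threshold
    # on the local azimuth variance.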
    doa_var_max_salience_mask = copy.deepcopy(doa_var)
    doa_var_min_salience_mask = copy.deepcopy(doa_var)
    for k in range(doa_var.get_num_frequency_bins()):
        for n in range(doa_var.get_num_time_bins()):

            if doa_var.data[0, k, n] > doa_var_salience[k, n]:
                doa_var_max_salience_mask.data[:, k, n] = 1.
            else:
                doa_var_max_salience_mask.data[:, k, n] = np.nan

            if doa_var.data[0, k, n] < doa_var_salience[k, n]:
                doa_var_min_salience_mask.data[:, k, n] = 1.
            else:
                doa_var_min_salience_mask.data[:, k, n] = np.nan


    # MINIMUM VARIANCE DOA
    masked_doa = doa.apply_mask(doa_var_min_salience_mask)
    if plot:
        psa.plot_doa(masked_doa,
                     title=plot_title + 'DOA - Minimum variance Salience Masked, w=' + str(analysis_window_size) + ' N: ' + str(
                         neighborhood_size))

    masked_doa = doa.apply_mask(doa_var_max_salience_mask)
    # if plot:
    #     psa.plot_doa(masked_doa,
    #                  title=plot_title + 'DOA - Maximum variance Salience Masked, w=' + str(analysis_window_size) + ' N: ' + str(
    #                      neighborhood_size))


    # if plot:
        # plt.figure()
        # plt.suptitle('DOA VAR')
        # plt.subplot(211)
        # plt.pcolormesh(doa_var.data[0,:,:])
        # plt.subplot(212)
        # plt.pcolormesh(doa_var.data[1,:,:])
        #
        # psa.plot_mask(doa_var_max_salience_mask,title='MAX SALIENCE')
        # psa.plot_mask(doa_var_min_salience_mask,title='MIN SALIENCE')







    ## Energy density
    energy_density_tf = psa.compute_energy_density(stft)
    # if plot:
    #     psa.plot_magnitude_spectrogram(energy_density_tf,title='Energy Density Spectrogram, w='+str(analysis_window_size))


    # Energy density salience

    neighborhood_size = round_up_to_odd(energy_density_tf.get_num_frequency_bins())
    energy_density_salience = threshold_local(energy_density_tf.data[0,:],block_size=neighborhood_size)
    energy_density_salience_mask = copy.deepcopy(energy_density_tf)
    for k in range(energy_density_tf.get_num_frequency_bins()):
        for n in range(energy_density_tf.get_num_time_bins()):
            if energy_density_tf.data[0, k, n] > energy_density_salience[k, n]:
                energy_density_salience_mask.data[:, k, n] = 1.
            else:
                energy_density_salience_mask.data[:, k, n] = np.nan

    # if plot:
    #     fig = plt.figure()
    #     fig.suptitle('energy salience, w=' + str(analysis_window_size))
    #
    #     x = np.arange(np.shape(energy_density_salience)[0])
    #     y = np.arange(np.shape(energy_density_salience)[1])
    #     plt.pcolormesh(y, x, energy_density_salience, norm=LogNorm())
    #     plt.ylabel('Frequency [Hz]')
    #     plt.xlabel('Time [sec]')
    #     plt.colorbar()

    # if plot:
    #     psa.plot_mask(energy_density_salience_mask, title='Energy Salience Mask'+str(analysis_window_size))

    masked_energy = energy_density_tf.apply_mask(energy_density_salience_mask)
    # if plot:
    #     psa.plot_magnitude_spectrogram(masked_energy, title=plot_title+'Energy - Energy Salience Masked, w='+str(analysis_window_size)+' N: '+str(neighborhood_size))

    masked_doa = doa.apply_mask(energy_density_salience_mask)
    if plot:
        psa.plot_doa(masked_doa, title=plot_title+'DOA - Energy Salience Masked, w='+str(analysis_window_size)+' N: '+str(neighborhood_size))

    masked_doa = doa.apply_mask(energy_density_salience_mask).apply_mask(doa_var_min_salience_mask)
    if plot:
        psa.plot_doa(masked_doa, title=plot_title+'DOA - VAR MIN,  Energy Salience Masked, w='+str(analysis_window_size)+' N: '+str(neighborhood_size))


    # masked_diffuseness = diffuseness.apply_mask(energy_density_salience_mask)
    # if plot:
    #     psa.plot_diffuseness(masked_diffuseness, title=plot_title+'Diffuseness - Energy Salience Masked, w='+str(analysis_window_size)+' N: '+str(neighborhood_size))


    # # Diffuseness density salience
    #
    # neighborhood_size = round_up_to_odd(diffuseness.get_num_frequency_bins())
    # diffuseness_salience = threshold_local(diffuseness.data[0,:],block_size=neighborhood_size)
    # diffuseness_salience_mask = copy.deepcopy(diffuseness)
    # for k in range(diffuseness.get_num_frequency_bins()):
    #     for n in range(diffuseness.get_num_time_bins()):
    #         if diffuseness.data[0, k, n] < diffuseness_salience[k, n]:
    #             diffuseness_salience_mask.data[:, k, n] = 1.
    #         else:
    #             diffuseness_salience_mask.data[:, k, n] = np.nan
    #
    # masked_energy = energy_density_tf.apply_mask(diffuseness_salience_mask)
    # if plot:
    #     psa.plot_magnitude_spectrogram(masked_energy, title=plot_title + 'Energy - Diffuseness Salience Masked, w=' + str(
    #         analysis_window_size) + ' N: ' + str(neighborhood_size))
    #
    # masked_doa = doa.apply_mask(diffuseness_salience_mask)
    # if plot:
    #     psa.plot_doa(masked_doa, title=plot_title + 'DOA - Diffuseness Salience Masked, w=' + str(
    #         analysis_window_size) + ' N: ' + str(neighborhood_size))
    #
    # masked_diffuseness = diffuseness.apply_mask(diffuseness_salience_mask)
    # if plot:
    #     psa.plot_diffuseness(masked_diffuseness, title=plot_title + 'Diffuseness - Diffuseness Salience Masked, w=' + str(
    #         analysis_window_size) + ' N: ' + str(neighborhood_size))

    # #
    # if plot:
    #     psa.plot_mask(diffuseness_salience_mask, title='Diffuseness Salience Mask'+str(neighborhood_size))
    # #
    # masked_dif = diffuseness.apply_mask(diffuseness_salience_mask)
    # if plot:
    #     psa.plot_diffuseness(masked_dif, title='Diffuseness - Salience Masked'+str(neighborhood_size))
    #
    # energy_diffuseness_mask = energy_density_salience_mask.apply_mask(diffuseness_salience_mask)
    #
    # masked_doa = masked_doa.apply_mask(diffuseness_salience_mask)
    # if plot:
    #     psa.plot_doa(masked_doa, title='DOA - Salience Masked - Diffuseness Masked, w='+str(analysis_window_size))
    #
    # fig = plt.figure()
    # fig.suptitle("diffuseness salience, block:"+str(neighborhood_size), fontsize=16)
    # x = np.arange(np.shape(diffuseness_salience)[0])
    # y = np.arange(np.shape(diffuseness_salience)[1])
    # plt.pcolormesh(y,x, diffuseness_salience, cmap='plasma_r',norm=LogNorm())
    # plt.ylabel('Frequency [Hz]')
    # plt.xlabel('Time [sec]')
    # plt.colorbar()



    # diffuseness_energy_mask = diffuseness_mask.apply_mask(energy_density_mask)
    # if plot:
    #     psa.plot_mask(diffuseness_energy_mask, title='Diffuseness + Energy Density Mask')
    #
    # masked_diffuseness = diffuseness.apply_mask(diffuseness_mask)
    # if plot:
    #     psa.plot_diffuseness(masked_diffuseness, title='Diffuseness, diffuseness mask')
    #
    # masked_diffuseness = masked_diffuseness.apply_mask(energy_density_mask)
    # if plot:
    #     psa.plot_diffuseness(masked_diffuseness, title='Diffuseness, energy density mask, diffuseness mask')
    #
    # masked_doa = masked_doa.apply_mask(diffuseness_mask)
    # if plot:
    #     psa.plot_doa(masked_doa,title='DoA estimates, energy density mask, diffuseness mask')


    ### Find horizontal-contiguous bins on doa estimates
    time_bins_with_energy = []
    for n in range(energy_density_salience_mask.get_num_time_bins()):
        if not np.all(np.isnan(energy_density_salience_mask.data[0,:,n])):
            time_bins_with_energy.append(n)

    time_region_starts = []
    time_region_ends = []
    for idx, b in enumerate(time_bins_with_energy):
        if time_bins_with_energy[idx] - time_bins_with_energy[idx - 1] != 1:
            time_region_starts.append(time_bins_with_energy[idx])
            time_region_ends.append(time_bins_with_energy[idx - 1]+1)

    time_region_starts.sort()
    time_region_ends.sort()
    assert len(time_region_starts) == len(time_region_ends)


    # Compute local doa estimates on contiguous time regions
    peak_stats = []
    for idx in range(len(time_region_starts)):
        n_range = range(time_region_starts[idx],time_region_ends[idx])

        local_azi = []
        local_ele = []

        index_of_bins_estimated = []
        for n in n_range:
            # Filter nans
            for k in np.arange(energy_density_salience_mask.get_num_frequency_bins()):

                if not np.isnan(energy_density_salience_mask.data[0, k, n]):
                    local_azi.append(masked_doa.data[0, k, n])
                    local_ele.append(masked_doa.data[1, k, n])
                    index_of_bins_estimated.append(n)

        local_azi = np.asarray(local_azi)
        local_ele = np.asarray(local_ele)
        # local_dif = np.asarray(local_dif)

        local_azi_mean = scipy.stats.circmean(local_azi, high=np.pi, low=-np.pi)
        local_azi_std = scipy.stats.circstd(local_azi, high=np.pi, low=-np.pi)
        local_ele_mean = np.mean(local_ele)
        local_ele_std = np.std(local_ele)

        if plot:
            fig = plt.figure()
            ax = fig.add_subplot(111)
            plt.suptitle(plot_title+'FREQ BIN '+str(idx)+' - bins '+str(n_range))
            # cmap = plt.cm.get_cmap("copper")
            plt.grid()
            plt.xlim(-np.pi,np.pi)
            plt.ylim(-np.pi/2,np.pi/2)

            plt.scatter(local_azi, local_ele, marker='o',)
            plt.scatter(local_azi_mean, local_ele_mean, c='red', s=20, marker='+')
            ax.add_patch(Ellipse(xy=(local_azi_mean,local_ele_mean), width=local_azi_std,height=local_ele_std, alpha=0.5))
            ax.add_patch(Ellipse(xy=(local_azi_mean,local_ele_mean), width=3*local_azi_std,height=3*local_ele_std, alpha=0.1))


        mean_location = np.mean(index_of_bins_estimated)
        time_resolution_ms = float(processing_window_ms) / masked_doa.get_num_time_bins()
        estimated_location_ms = mean_location * time_resolution_ms

        peak_stats.append(
            [estimated_location_ms, [local_azi_mean, local_azi_std], [local_ele_mean, local_ele_std]])
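        # Each entry is [estimated_location_ms, [azi_mean, azi_std], [ele_mean, ele_std]],
        # with the angles in radians as produced by the circular statistics above.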

    ### Return peak stats
    return peak_stats
Example #6
def plot_doa(x_t, title):
    x = psa.Signal(x_t, fs, 'acn', 'sn3d')
    X = psa.Stft.fromSignal(x,
                            window_size=window_size,
                            window_overlap=window_overlap,
                            nfft=nfft)
    X_doa = psa.compute_DOA(X)
    psa.plot_doa(X_doa, title)
    plt.show()
    return X_doa


# True reverberant
Y_doa = plot_doa(y_t, 'y_t')
# Estimated reverberant
O_doa = plot_doa(est_s_t, 'output_t')
# Difference
psa.plot_doa(Y_doa - O_doa, 'difference true-estimated')
plt.show()

# Reverberant

# Difference
psa.plot_doa(y2_t, 'y2')
plt.show()
psa.plot_doa(x2_t, 'x2')
plt.show()

#
# # Resynthesis signal
# R1_doa = plot_doa(r1_t, 'r1_t')
# R2_doa = plot_doa(r2_t, 'r2_t')
#
Example #7

        window_overlap = window_size // 2

        _, _, S_dir = scipy.signal.stft(s_dir,
                                        fs,
                                        nperseg=window_size,
                                        noverlap=window_overlap)

        s_tot_ambi = psa.Signal(bformat, fs, 'acn', 'n3d')
        S_tot_ambi = psa.Stft.fromSignal(s_tot_ambi,
                                         window_size=window_size,
                                         window_overlap=window_overlap)
        doa = psa.compute_DOA(S_tot_ambi)

        psa.plot_signal(s_tot_ambi)
        psa.plot_magnitude_spectrogram(S_tot_ambi)
        psa.plot_doa(doa)

        ### DIFFUSENESS

        ita = S_tot_ambi.compute_ita(r=r)
        psa.plot_directivity(ita, title='ita')

        # ita_re = S_tot_ambi.compute_ita_re(r=r)
        # psa.plot_directivity(ita_re, title='ita re')

        ### KSI
        ksi = S_tot_ambi.compute_ksi(r=r)
        psa.plot_directivity(ksi, title='ksi')

        # ksi_re = S_tot_ambi.compute_ksi_re(r=r)
        # psa.plot_directivity(ksi_re, title='ksi re')
Example #8
plt.figure()
plt.plot(sh_rirs)
plt.show()

signal_len_samples = int(np.floor(1. * fs))
signal = np.random.randn(signal_len_samples)

reverberant_signal = np.zeros((signal_len_samples, 4))
for i in range(4):
    reverberant_signal[:, i] = scipy.signal.fftconvolve(
        signal, sh_rirs[:, i].squeeze())[:signal_len_samples]
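# Each ambisonic channel is the dry noise convolved with the corresponding SH RIR and
# truncated to the original length, giving a synthetic first-order reverberant recording.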
x = psa.Signal(reverberant_signal.T, fs, 'acn', 'sn3d')
psa.plot_signal(x, title='waveform')

analysis_window_size = 512
window_overlap = analysis_window_size // 2
fft_size = analysis_window_size
stft = psa.Stft.fromSignal(x,
                           window_size=analysis_window_size,
                           window_overlap=window_overlap,
                           nfft=fft_size)
psa.plot_magnitude_spectrogram(stft, title='magnitude spectrogram')
doa = psa.compute_DOA(stft)
psa.plot_doa(doa, title='doa')

plt.show()

i = psa.compute_intensity_vector(stft)

psa.plot_magnitude_spectrogram(i)
plt.show()