def extract_mfcc_features_one_channel(signal, window_block=None, **kwargs):
    """Extract MFCC features from the first channel of a signal.

    :param signal: np.ndarray of samples, 1-D or 2-D; for 2-D input only
        the first column (channel 0) is used.
    :param window_block: window duration used to segment the signal
        (multiplied by kwargs['fs'], so presumably seconds — confirm with
        callers); if None, the whole signal is one single window.
    :param kwargs: forwarded to ``extract_mfcc_features``; must contain
        'fs' (sampling rate) when ``window_block`` is given.
    :return: a list of features (if window_block is None, the list
        contains only one features entry).
    """
    # Multi-channel input: keep only the first channel.
    if len(signal.shape) > 1:
        signal = signal[:, 0]

    if window_block is None:
        # Single window spanning the whole signal, no overlap.
        block_size = signal.size
        overlap = 0
    else:
        # int(): window_block * fs is a float, and segment_axis needs an
        # integral window length.
        block_size = int(min(window_block * kwargs['fs'], signal.size))
        overlap = block_size // 2  # 50% overlap between consecutive windows

    return [extract_mfcc_features(s, **kwargs)
            for s in segment_axis(signal, block_size, overlap=overlap, end='cut')]
    def processed_signal(self, data=None, fs=48000., window_block=1.0):
        """Classify a 1-D signal window by window.

        :param data: non-empty 1-D np.ndarray of samples (one channel only).
        :param fs: sampling rate in Hz.
        :param window_block: duration of window block to use, default : 1.0
            second; if None, the full signal is used as one big window.
        :return: list of ClassificationResult namedtuple, one per window.
        """
        # NOTE(review): asserts are stripped under `python -O`; consider
        # explicit raises if this is a public entry point.
        assert (np.ndarray == type(data))
        assert (len(data.shape) == 1)  # we only support one channel for now
        assert (data.size != 0)

        if window_block is None:
            block_size = data.size
        else:
            # int(): window_block * fs is a float, and segment_axis needs
            # an integral window length.
            block_size = int(min(window_block * fs, data.size))
        overlap = block_size >> 1  # int(block_size / 2), i.e. 50% overlap

        res = []
        for num, signal in enumerate(segment_axis(data, block_size, overlap=overlap, end='cut')):
            preprocessed_features = get_features(signal, nfft=self.nfft, scaler=self.scaler)
            confidence = get_confidence_prediction(self.clf, preprocessed_features)
            # [0] : as asked by Alex we return only class in string not an np.array
            class_predicted = self.clf.predict(preprocessed_features)[0]
            # Each window advances by (block_size - overlap) samples.
            timestamp_start = num * (block_size - overlap) / float(fs)
            timestamp_end = timestamp_start + block_size / float(fs)
            score = self.post_processed_score(confidence=confidence, class_predicted=class_predicted)
            res.append(ClassificationResult(timestamp_start, timestamp_end, class_predicted, confidence, score))
        return res
    def processed_signal(self, data=None, fs=48000., window_block=1.0):
        """Classify a 1-D signal window by window.

        NOTE(review): this is a near-identical duplicate of an earlier
        ``processed_signal`` definition in this file; at class-creation
        time this later definition is the one that takes effect. The
        earlier copy should probably be deleted — confirm with the team.

        :param data: non-empty 1-D np.ndarray of samples (one channel only).
        :param fs: sampling rate in Hz.
        :param window_block: duration of window block to use, default : 1.0
            second; if None, the full signal is used as one big window.
        :return: list of ClassificationResult namedtuple, one per window.
        """
        # NOTE(review): asserts are stripped under `python -O`; consider
        # explicit raises if this is a public entry point.
        assert (np.ndarray == type(data))
        assert (len(data.shape) == 1)  # we only support one channel for now
        assert (data.size != 0)

        if window_block is None:
            block_size = data.size
        else:
            # int(): window_block * fs is a float, and segment_axis needs
            # an integral window length.
            block_size = int(min(window_block * fs, data.size))
        overlap = block_size >> 1  # int(block_size / 2), i.e. 50% overlap

        res = []
        windows = segment_axis(data, block_size, overlap=overlap, end='cut')
        for num, signal in enumerate(windows):
            preprocessed_features = get_features(signal,
                                                 nfft=self.nfft,
                                                 scaler=self.scaler)
            confidence = get_confidence_prediction(self.clf,
                                                   preprocessed_features)
            # [0] : as asked by Alex we return only class in string not an np.array
            class_predicted = self.clf.predict(preprocessed_features)[0]
            # Each window advances by (block_size - overlap) samples.
            timestamp_start = num * (block_size - overlap) / float(fs)
            timestamp_end = timestamp_start + block_size / float(fs)
            score = self.post_processed_score(confidence=confidence,
                                              class_predicted=class_predicted)
            res.append(ClassificationResult(timestamp_start, timestamp_end,
                                            class_predicted, confidence,
                                            score))
        return res