Example 1
0
 def __init__(self, min_hunt, max_hunt, min_range, low_per_mil, high_per_mil, start_window, start_count, stop_window, stop_count):
     """Configure the tagger: validate the window/threshold parameters,
     build the hunt-range tracker, and set the initial BAC state.

     Thresholds given per-mil are stored as fractions (divided by 1000).
     """
     assert high_per_mil > low_per_mil
     assert min_range > 0
     assert start_window > start_count
     assert stop_window > stop_count
     # tracker hunts for the signal level between the given bounds
     self.tracker = RangeTracker(min_hunt, max_hunt)
     low_frac = low_per_mil / 1000
     high_frac = high_per_mil / 1000
     self.data = (low_frac, high_frac, min_range, start_window, start_count, stop_window, stop_count)
     dprint('UtteranceTagger', '__init__:', min_hunt, max_hunt, *self.data)
     # initial state: BAC tag, zero count, empty event queue
     self.state = (BAC, 0, deque())
Example 2
0
    def __call__(self, data):
        """Return the dB energy level (clamped to 0..140) of the selected band.

        NOTE(review): ``band *= band`` squares what may be a view into the
        caller's *data* buffer, mutating it in place — confirm callers expect
        this.
        """
        # empirically, (and interestingly): this (near 16-bit) shift factor lets
        # the exposed ref property be 1 while giving occasional 0 dB output on a
        # Mac with "Use ambient noise reduction" set to on and "Input volume"
        # slider set to the minimum (settings which give about the lowest level
        # signal data you can get on the mac)
        ref_factor = N.float32(1 / (1 << 18))
        # this should be an adequate range of dBs
        clip_lo, clip_hi = 0, 140

        assert data.shape[-1] == self.data_length
        band = data[self.select]
        assert len(band) >= 1
        # scale the reference by the channel count so the comparison is
        # against total (summed) band energy
        ref = N.float32(self.ref * len(band) * ref_factor)
        band *= band
        bandsum = band.sum()
        # XXX not ndarrays....
        # renamed from `sum`, which shadowed the builtin
        energy = float(bandsum)
        # (a slight smoothing step, (prev + energy) / 2, was disabled here:
        # bizarrely it caused Python ncurses to bog down)
        # dB space is 10 * log10 (energy)
        dB = (10 * math.log10(energy / ref)) if energy > ref else 0
        # clamp into the displayable range
        dB = min(clip_hi, max(clip_lo, dB))
        dcheck(vumeterlog) and dprint(vumeterlog, 'dB', N.float32(dB), 'channels', len(band), 'ref', self.ref, 'scaled_ref', ref, 'energy', bandsum)
        # we return the dB in decibel scaling
        return dB
Example 3
0
def ddt_accum(value):
    """Accumulate decision statistics for one paired event and write an
    ASCII scatter line to displaystream2.

    *value* is a pair of events; each event is ``(label, data)`` where
    ``data`` is a two-item sequence of ``(score, predicted_label)`` pairs,
    best first, and both events carry the same reference label.  Updates
    the module-level label_stats, accuracy_stats, ddt_sums and last_label.
    """
    event0, event1 = value
    # both events must agree on the reference label
    assert event0[0] == event1[0]
    data0 = event0[1]
    data1 = event1[1]
    assert len(data0) == len(data1) == 2

    line_len = 255
    line = [' '] * line_len
    # plot origin sits a quarter of the way across the line
    offset = line_len // 4

    label = event0[0]
    label_stats[0] += 1
    if label:
        label_stats[1] += 1
    else:
        assert label == False
    # count how often each model's top prediction matches the label
    if data0[0][1] == label:
        accuracy_stats[0] += 1
    if data1[0][1] == label:
        accuracy_stats[1] += 1

    global last_label
    if label != last_label:
        dprint('ddt_accum', label)
        dprint('ddt_accum', ' ', label_stats, accuracy_stats)
        last_label = label

    def scores_by_truth(data):
        # return (score-for-True, score-for-False) regardless of which
        # hypothesis is listed first
        if data[0][1]:
            return data[0][0], data[1][0]
        assert data[0][1] == False
        return data[1][0], data[0][0]

    c0True, c0False = scores_by_truth(data0)
    c1True, c1False = scores_by_truth(data1)

    # log-likelihood ratios of model 0 over model 1 for each hypothesis
    llrTrue = math.log(c0True / c1True)
    llrFalse = math.log(c0False / c1False)

    # running sign counts: +1 when model 0 wins, -1 when model 1 wins,
    # unchanged on a tie (bool arithmetic replaces the if/elif/else chains)
    ddt_sums[0] += (llrTrue > 0) - (llrTrue < 0)
    ddt_sums[1] += (llrFalse > 0) - (llrFalse < 0)

    def clip(val, scale):
        # map a value onto a valid column index of the line
        col = int(val * scale) + offset
        return min(line_len - 1, max(0, col))

    # instantaneous LLRs
    line[clip(llrTrue, ddt_scale)] = '*'
    line[clip(llrFalse, ddt_scale)] = '|'
    # accumulated sign counts (drawn last, so they win collisions)
    line[clip(ddt_sums[0], ddt_accum_scale)] = '0'
    line[clip(ddt_sums[1], ddt_accum_scale)] = 'X'

    line.append('\n')
    displaystream2.write(''.join(line))