Example 1
import numpy as np

# `at` is assumed to be the project's signal-analysis helper module (the one
# providing findTdiffs); its import is not shown in this excerpt.
def decodeInitialization(chunk, rate=96000.0):
    settings = {}
    diffs = at.findTdiffs(chunk, rate=rate)

    params = np.load("initialization_settings.npz")

    # Decode channel
    # Calculate square error of received time distances with reference
    ch_times = diffs[0:2]
    dists = np.power(params['channels'] - ch_times, 2).sum(1)

    # TODO: possibly raise an error if the minimum distance is too large

    # Assume the group with smallest error
    settings['channel'] = dists.argmin() + 1

    # Read intermediate, though not used here at the moment
    inter_dist = np.power(params['intermediate'] - diffs[2:5], 2).sum()

    # TODO: possibly raise an error if the distance is too large

    # Read group settings
    settings['group'] = np.array([])
    pos = 5
    length = diffs.shape[0]

    while pos < (length - 1):
        if length - pos == 2:
            # Last presence pulse
            # Calculate distance, though not used here at the moment
            dist = np.power(params['grp_present'][0:2] - diffs[pos:pos + 2],
                            2).sum()
            settings['group'] = np.append(settings['group'], True)
            pos += 2
        else:
            # There is more than one group remaining
            dist_present = np.power(params['grp_present'] - diffs[pos:pos + 3],
                                    2).sum()
            dist_absent = np.power(params['grp_absent'] - diffs[pos], 2)

            if dist_present < dist_absent:
                settings['group'] = np.append(settings['group'], True)
                pos += 3
            else:
                settings['group'] = np.append(settings['group'], False)
                pos += 1

    return settings
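
The reference data loaded above comes from initialization_settings.npz, whose layout follows from the indexing in the function: channels holds one row of two reference time differences per channel, intermediate and grp_present hold three reference differences each, and grp_absent is a single value. Below is a minimal sketch of how such a file could be written with np.savez; every numeric value is a made-up placeholder, and the number of channels (four) is likewise only illustrative, not a measured reference.

import numpy as np

# All values are hypothetical placeholders; real references would be averaged
# from measured recordings (see the averaging in the later examples).
np.savez("initialization_settings.npz",
         channels=np.array([[1.12e-3, 1.30e-3],    # Ch1 reference diff pair
                            [1.18e-3, 1.36e-3],    # Ch2
                            [1.24e-3, 1.42e-3],    # Ch3
                            [1.30e-3, 1.48e-3]]),  # Ch4
         intermediate=np.array([5.0e-4, 5.2e-4, 5.4e-4]),
         grp_present=np.array([4.0e-4, 4.2e-4, 4.4e-4]),
         grp_absent=9.0e-4)

With a file like this in place, decodeInitialization(chunk) returns a dict containing the detected channel number and one truthy entry per group-presence pulse.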
Example 2
wavs = []  # `al` (audio file I/O) and `tools` are assumed helper modules imported earlier in the original script
wavs.append(al.wavread("Samples/Ch1_G2_M128_NoMaster_NoFlashPresent_sb900.wav")[0][:,0])
wavs.append(al.wavread("Samples/Ch1_G3_M128_NoMaster_NoFlashPresent_sb900.wav")[0][:,0])
wavs.append(al.wavread("Samples/Ch1_G1_M128_G2_M128_NoMaster_NoFlashPresent_sb900.wav")[0][:,0])
wavs.append(al.wavread("Samples/Ch1_G1_M128_G3_M128_NoMaster_NoFlashPresent_sb900.wav")[0][:,0])
wavs.append(al.wavread("Samples/Ch1_G2_M128_G3_M128_NoMaster_NoFlashPresent_sb900.wav")[0][:,0])
wavs.append(al.wavread("Samples/Ch1_G1_M128_G2_M128_G3_M128_NoMaster_NoFlashPresent_sb900.wav")[0][:,0])
N = len(wavs)

chunks = []
bl1 = []
td1 = []
for wav in wavs:
    nc = tools.findChunks(wav, 1000.0, 0.1)
    chunks.append(nc)
    bl1.append(wav[nc[0].start:nc[0].end])
    td1.append(tools.findTdiffs(bl1[-1]))
    
# Calculate average tdiffs for the next three blocks
tdb1 = 0.0
tdb2 = 0.0
tdb3 = 0.0
for td in td1:
    tdb1 = tdb1 + td[2]
    tdb2 = tdb2 + td[3]
    tdb3 = tdb3 + td[4]

tdb1 = tdb1 / N
tdb2 = tdb2 / N
tdb3 = tdb3 / N

# Calculate average tdiffs for Group Present pulse information
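
The running sums above can also be collapsed into a single call by stacking the measured time differences and averaging along the first axis. A short sketch, assuming (as above) that every entry of td1 has at least five elements:

import numpy as np

# Average the 3rd-5th time differences over all recordings in one step;
# equivalent to the explicit tdb1/tdb2/tdb3 accumulation above.
tdb1, tdb2, tdb3 = np.mean([td[2:5] for td in td1], axis=0)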
Example 3
wavs = []
#wavs.append(al.wavread("Samples/Ch1_G1_TTL+3.0_NoMaster_NoFlashPresent_d7000.wav")[0][:,0])
#wavs.append(al.wavread("Samples/Ch1_G1_AA-3.0_NoMaster_NoFlashPresent_d7000.wav")[0][:,0])
#wavs.append(al.wavread("Samples/Ch1_G1_AA_0_NoMaster_NoFlashPresent_d7000.wav")[0][:,0])
wavs.append(tools.normalize(al.wavread("Samples/Ch1_G1_AA+3.0_NoMaster_NoFlashPresent_d7000.wav")[0][:,0]))
wavs.append(tools.normalize(al.wavread("Samples/Ch1_G1_TTL_0_G2_TTL_0_NoMaster_NoFlashPresent_d7000.wav")[0][:,0]))
wavs.append(tools.normalize(al.wavread("Samples/Ch1_G1_TTL_0_G2_TTL_0_G3_TTL_0_NoMaster_NoFlashPresent_d7000.wav")[0][:,0]))
wavs.append(tools.normalize(al.wavread("Samples/Ch1_G1_M32_G2_TTL_0_G3_TTL_0_NoMaster_NoFlashPresent_sb900.wav")[0][:,0]))
wavs.append(tools.normalize(al.wavread("Samples/Ch1_G1_M32_G2_M32_G3_TTL_0_NoMaster_NoFlashPresent_sb900.wav")[0][:,0]))
wavs.append(tools.normalize(al.wavread("Samples/Ch1_G1_M32_G2_TTL_0_G3_M32_NoMaster_NoFlashPresent_sb900.wav")[0][:,0]))
wavs.append(tools.normalize(al.wavread("Samples/Ch1_G1_TTL_0_G2_M32_G3_M32_NoMaster_NoFlashPresent_sb900.wav")[0][:,0]))
N = len(wavs)

# Extract the second chunk of every data block
chunks = [tools.findChunks(wav, 2500.0, 0.1) for wav in wavs]
bl2 = [wav[c[1].start:c[1].end] for (wav, c) in zip(wavs, chunks)]
td2 = [tools.findTdiffs(bl) for bl in bl2]
pl2 = [tools.findPulses(bl) for bl in bl2]
plnorm = [p - p[0] for p in pl2]

pl.plot(wavs[-1])
pl.show()

# Group pulses
corr = tools.groupPulses(plnorm)

# Plot result
pl.figure()
# (pl.hold was removed in matplotlib 3.x; repeated plot calls already draw into the same axes)
for i in range(N):
    pl.plot(corr[i], i * np.ones(len(corr[i])), 'bx')
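
tools.normalize itself is not shown in these excerpts; a plausible minimal stand-in, assuming it simply scales each recording to unit peak amplitude (an assumption about the helper, not the project's actual implementation):

import numpy as np

def normalize(signal):
    # Hypothetical stand-in for tools.normalize: scale so the largest
    # absolute sample becomes 1.0, leaving an all-zero signal untouched.
    peak = np.max(np.abs(signal))
    return signal / peak if peak > 0 else signal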