Code Example #1
def test_4mic():
    import signal
    import threading
    import numpy as np
    from pixel_ring import pixel_ring
    from gpiozero import LED

    # MicArray and angle_to_index_angle are expected to be defined at module level.
    power = LED(5)  # power-enable pin for the LED ring
    power.on()

    is_quit = threading.Event()

    def signal_handler(sig, frame):
        is_quit.set()
        print('Quit')

    signal.signal(signal.SIGINT, signal_handler)

    with MicArray(16000, 4, 16000 // 4) as mic:
        a = []  # sliding window of recent quantized DOA angles
        for chunk in mic.read_chunks():
            direction = mic.get_direction(chunk)

            if len(a) > 4:
                # Use the most frequent angle in the window to smooth out jitter.
                b = np.bincount(a).argmax()
                print(b)
                print(a)

                # Light the LED closest to the estimated direction.
                position = int(b / (360 / 12))
                pixels = [0, 0, 0, 10] * 12
                pixels[position * 4 + 2] = 10
                pixel_ring.show(pixels)

                a.pop(0)  # drop the oldest angle from the window
            else:
                a.append(angle_to_index_angle(direction))

            if is_quit.is_set():
                break
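
Both this example and Example #3 call an angle_to_index_angle() helper that is not included in the snippets. A minimal sketch of what it might look like, assuming it quantizes a raw DOA estimate (in degrees) to the centre angle of the nearest of the 12 ring LEDs, returned as a non-negative int so that np.bincount() can vote over the window:

def angle_to_index_angle(direction):
    # Hypothetical helper: snap the angle to the nearest multiple of 30 degrees
    # (12 LEDs around the ring), wrapping 360 back to 0.
    return (int(round(direction / 30.0)) % 12) * 30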
Code Example #2
def consume(info, sins, board, opts, vars):
    # Properties of the first input stream (sample rate and dimension); unused here.
    sr = sins[0].sr
    dim = sins[0].dim
    position = 3  # unused placeholder

    # Build a frame of 12 LEDs x 4 values and light two fixed channels.
    pixels = [0, 0, 0, 0] * 12
    pixels[5] = 255
    pixels[8] = 100

    pixel_ring.show(pixels)
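
All of these snippets pass pixel_ring.show() a flat list of 12 x 4 values, one group of four per LED (the exact channel order depends on which pixel_ring driver is active). As an illustration only, a small helper that builds such a frame and lights a single LED the way Examples #1 and #3 do:

def single_led_frame(position, value=10, n_leds=12):
    # Build an all-off frame of n_leds * 4 values and set one channel of one LED,
    # mirroring the pixels[position * 4 + 2] pattern used in the other examples.
    pixels = [0, 0, 0, 0] * n_leds
    pixels[position * 4 + 2] = value
    return pixels

# Example usage:
# pixel_ring.show(single_led_frame(3))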
Code Example #3
def main():
    vad = webrtcvad.Vad(3)

    speech_count = 0
    chunks = []
    doa_chunks = int(DOA_FRAMES / VAD_FRAMES)

    try:
        with MicArray(RATE, CHANNELS, RATE * VAD_FRAMES / 1000) as mic:
            a = []
            for chunk in mic.read_chunks():
                # Use single channel audio to detect voice activity
                if vad.is_speech(chunk[0::CHANNELS].tobytes(), RATE):
                    speech_count += 1
                    sys.stdout.write('1')
                else:
                    sys.stdout.write('0')

                sys.stdout.flush()

                chunks.append(chunk)
                if len(chunks) == doa_chunks:
                    if speech_count > (doa_chunks / 2):
                        frames = np.concatenate(chunks)
                        direction = mic.get_direction(frames)
                        if len(a) > 2:
                            # Report the most frequent recent angle to smooth out jitter.
                            b = np.bincount(a).argmax()

                            # Light the LED nearest to the estimated direction.
                            position = int(b / (360 / 12))
                            pixels = [0, 0, 0, 10] * 12
                            pixels[position * 4 + 2] = 10
                            pixel_ring.show(pixels)
                            print('\n{}'.format(int(b)))
                            a.pop(0)  # drop the oldest angle from the window
                        else:
                            new_angle = angle_to_index_angle(direction)
                            a.append(new_angle)
                    speech_count = 0
                    chunks = []

    except KeyboardInterrupt:
        pass

    pixel_ring.off()
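
Example #3 depends on module-level constants that are not shown. The values below are assumptions chosen to be consistent with a 4-microphone 16 kHz array and webrtcvad's accepted frame lengths, not values taken from the original project:

RATE = 16000       # sample rate; webrtcvad supports 8, 16, 32 or 48 kHz
CHANNELS = 4       # number of microphone channels in each chunk
VAD_FRAMES = 10    # ms per VAD frame; webrtcvad accepts 10, 20 or 30 ms
DOA_FRAMES = 200   # ms of audio accumulated before each direction estimate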
Code Example #4
def set_leds():
    '''
    Sets pretty colors
    '''
    global clap
    br = 50
    if clap == 1:
        pixel_ring.show([0, br, br, br, 0, 0, 0, 0, 0, 0, 0, 0]*4)
    elif clap == 2:
        pixel_ring.show([0, br, br, br, 0, br, br, br, 0, 0, 0, 0]*4)
    elif clap == 3:
        pixel_ring.show([0, br, br, br, 0, br, br, br, 0, br, br, br]*4)
Code Example #5
def main():
    global clap
    global flag
    global pin

    args = sys.argv[1:]
    observer = Observer()
    mh = MyHandler(patterns=["*/clapper_wait.txt"])
    observer.schedule(mh, '/home/pi/')
    observer.start()

    # exit_immediate = 0

    chunk = 1024
    FORMAT = pyaudio.paInt16
    CHANNELS = 1
    RATE = 44100
    max_threshold = 4000
    corr_threshold = 8e8
    max_value = 0
    p = pyaudio.PyAudio()
    stream = p.open(format=FORMAT,
                    channels=CHANNELS,
                    rate=RATE,
                    input=True,
                    output=True,
                    frames_per_buffer=chunk)
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(pin, GPIO.OUT)

    # Load the reference wave form. All clappiness is determined in relation to this clap
    sample_waveform = np.array(np.load("/home/pi/Documents/clap/pi-clap/golden_clap.npy"), dtype=float)
    sw_fft = np.fft.fft(sample_waveform)  # Now you're cooking with gas

    print("Clap detection initialized")
    pixel_ring.off()
    c = 0
    last_intensity = 0
    last_time = time.time()
    while True:
        data = stream.read(chunk)
        as_ints = array('h', data)
        max_value = max(as_ints)
        #print("max: {}, time = {}".format(max_value, time.time())),
        if max_value > max_threshold and max_value >= last_intensity: 
            as_float = np.array(as_ints, dtype=float)

            # Measure loudness
            mag = np.sum(as_float**2)

            #Measure clappiness
            corr = np.abs(np.fft.ifft(np.fft.fft(as_float)*sw_fft))**2/mag
            corr_value = np.max(corr)

            if corr_value > corr_threshold:
                #print("corr: {}, max: {}, claps: {}".format(corr_value, max_value, clap))
                #np.save("hand_clap" + str(c) + ".npy", as_ints) # save 
                #print("saving {}".format(c))
                c += 1

                now = time.time()
                if now > last_time + 0.2:  # debounce (prevents samples stuck in the FIFO from re-triggering)
                    clap += 1
                    set_leds()
                    last_time = now
                    print("Clapped")
            if mh.check_paused():
                # If Clapper has been paused
                print("Clapper Paused for {} minutes".format(mh.data))
                br = 30 # brightness
                pixel_ring.show([0, 0, br, 0]*12) # visually inform pause
                clap = 0
            else:
                # If clapper has not been paused
                if clap == 1 and flag == 0:
                    _thread.start_new_thread( waitForClaps, ("waitThread",) )
                    flag = 1

        if exitFlag:
            sys.exit(0)
        last_intensity = max_value
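
Example #5 hands control to a waitForClaps thread that is not part of the snippet. A purely hypothetical sketch of such a function, reusing the globals and modules already used above and assuming a double clap is meant to toggle the GPIO pin (the original project's behaviour may differ):

def waitForClaps(thread_name):
    # Hypothetical: give the user a short window to add more claps,
    # then act on the final count and reset the detector state.
    global clap, flag
    time.sleep(1.5)
    if clap == 2:
        GPIO.output(pin, not GPIO.input(pin))  # toggle the controlled pin
    pixel_ring.off()
    clap = 0
    flag = 0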