Example #1
0
def skip():
    """
    Callback for the Skip button. Delegates the actual skipping to the media
    player, then refreshes the labels so the GUI reflects the new track.
    """
    Playlist.skip_song()
    # Show the title of the track that is now current and reset the toggle
    # button to "Pause", since skipping leaves the player in a playing state.
    current_song_text.setText(str(Playlist.get_current_song()))
    play_pause_btn.setText("Pause")
Example #2
0
def refresh():
    """
    Redraws the images shown on screen (i.e. after a new reaction from the
    user). Loads each image file from disk, scales it to its label's size,
    installs it as the label's pixmap, and finally repaints the whole window.
    """
    # The two trailing 1s passed to scaled() request keep-aspect-ratio and
    # smooth transformation, respectively.
    for img_label, img_path in ((camera_img, CAMERA_IMG_PATH),
                                (live_img, LIVE_IMG_PATH),
                                (prog_img, PROG_IMG_PATH)):
        pixmap = QPixmap(img_path)
        img_label.setPixmap(pixmap.scaled(img_label.width(), img_label.height(), 1, 1))

    if Playlist.is_playing():
        current_song_text.setText(str(Playlist.get_current_song()))

    window.show()
Example #3
0
def init():
    """
    Initializes all the objects on the GUI by loading "EMMA.ui" which contains all the necessary UI elements and
    their placing. References them to properly named objects so they can be used in code. Displays the GUI.
    """
    # BUGFIX: the global statement previously declared "registe_user_btn" (typo),
    # so register_user_btn was silently a function-local and never exported as a
    # module-level global like its sibling widgets.
    global app, window, dead, frozen, play_pause_btn, skip_btn, vol_slider, vol_text, pause_EMMA_btn, register_user_btn, \
        camera_img, live_img, prog_img, current_song_text, now_playing_text, register_window
    app = QApplication(sys.argv)
    window = EmmaWindow()
    uic.loadUi(os.path.join(sys.path[0], "user_interface/EMMA.ui"), window)
    dead = False  # Used to signal if EMMA and the GUI should stop.
    frozen = False  # Used to signal if EMMA should pause.

    # Find and reference the UI elements from EMMA.ui
    play_pause_btn = window.findChild(QPushButton, 'play_pause_btn')
    skip_btn = window.findChild(QPushButton, 'skip_btn')
    pause_EMMA_btn = window.findChild(QPushButton, 'pause_EMMA_btn')
    register_user_btn = window.findChild(QPushButton, 'register_user_btn')
    vol_slider = window.findChild(QSlider, 'vol_slider')
    vol_text = window.findChild(QLabel, 'vol_slider_text')
    current_song_text = window.findChild(QLabel, 'current_song_text')
    now_playing_text = window.findChild(QLabel, 'now_playing_text')
    camera_img = window.findChild(QLabel, 'camera_img')
    live_img = window.findChild(QLabel, 'live_img')
    prog_img = window.findChild(QLabel, 'prog_img')
    GUI_playlist.init(window)  # Links the playlist widget to its functions
    Playlist.set_volume(vol_slider.value())  # Make sure VLC has the same value as the slider initial value.

    register_window = QErrorMessage()  # Pop-up window for registering a user.
    register_window.setWindowTitle('Face Identification')
    register_window.setWindowIcon(QIcon('user_interface/emma_icon.png'))

    # Connects elements to callback functions
    play_pause_btn.clicked.connect(play_pause)
    skip_btn.clicked.connect(skip)
    vol_slider.valueChanged.connect(change_volume)
    pause_EMMA_btn.clicked.connect(start_pause_EMMA)
    register_user_btn.clicked.connect(register_user)
    # Hidden until a song actually starts playing.
    current_song_text.hide()
    now_playing_text.hide()

    buttons_initialize(play_pause, skip)

    window.show()
Example #4
0
def play_entry(label):
    """
    Triggers the playing/pausing of a specific entry, given its label, and sets
    the relevant buttons and labels to states that reflect the change.
    :param label: The label containing the name of the entry to be played or paused.
    :return:
    """
    Playlist.play_song(label)
    # NOTE: is_playing() is queried AFTER play_song(); the branch meaning
    # depends on that ordering, so it is preserved exactly.
    if Playlist.is_playing():
        # Entry was playing: pause it and hide the "now playing" indicator.
        user_interface.GUI.play_pause_btn.setText("Play")
        Playlist.pause()
        user_interface.GUI.now_playing_text.hide()
    else:
        user_interface.GUI.play_pause_btn.setText("Pause")
        user_interface.GUI.now_playing_text.show()
        user_interface.GUI.current_song_text.setText(str(label.text()))
        if not user_interface.GUI.current_song_text.isVisible():
            user_interface.GUI.current_song_text.show()
Example #5
0
def play_pause():
    """
    Callback for the Play/Pause button. Starts/stops/resumes a song and updates
    the button text and the song labels to match the new state.
    """
    if Playlist.is_playing():
        # Currently playing: pause and hide the "now playing" indicator.
        play_pause_btn.setText("Play")
        Playlist.pause()
        now_playing_text.hide()
        return

    # Currently paused/stopped: start (or resume) playback.
    play_pause_btn.setText("Pause")
    Playlist.play()
    now_playing_text.show()
    if not current_song_text.isVisible():
        current_song_text.setText(str(Playlist.get_current_song()))
        current_song_text.show()
Example #6
0
def initialize():
    """
    Initialize the MongoDB client, create the per-session log collections and
    start the song player.
    :return: A (db, sessionID) tuple: the MongoDB database handle and the
             identifier used to tag this session's collections.
    """

    global args

    client = MongoClient()
    db = client.test_database

    if args.test:
        # Fixed session id makes test runs reproducible.
        sessionID = 'test'
    else:
        import random
        import string
        sessionID = ''.join(
            random.choices(string.ascii_uppercase + string.digits, k=15))

    def _trace(msg):
        # Progress messages are only wanted in test mode.
        if args.test:
            print(msg)

    track_history.create_track_log(db, sessionID)
    _trace('Created track history...')

    progress_history.create_progress_log(db, sessionID)
    _trace('Created progress history...')

    aggdata.create_agg_log(db, sessionID)
    _trace('Created aggregated data logs...')

    # TODO: raise IndexError('Cannot choose from an empty sequence') from None
    Playlist.song_player(db, sessionID, args.repeat, args.test)

    if args.azure:
        plotter.set_azure_flag()
    plotter.init()

    _trace('Setting up the buttons...')

    # NOTE(review): a commented-out settings.cfg bootstrap block was removed
    # here — it embedded an Azure API key in plain text. Credentials must not
    # live in source; load them from settings.cfg / the environment instead.

    return db, sessionID
Example #7
0
def main():
    """
    Main method. Starts the GUI thread and the camera, then loops: grabs a
    frame, classifies the user's facial emotion roughly every 3 seconds and
    steers the playlist based on the resulting song score. Exits when the GUI
    is closed.
    """

    # Choose running model.
    global args
    args = CLIparser.parseFlags()

    db, sessionID = initialize()
    # Start the camera and the GUI.
    thread = threading.Thread(target=GUI.run)
    # FIX: Thread.setDaemon() is deprecated (removed in newer Pythons);
    # assigning the daemon attribute is the supported spelling.
    thread.daemon = True
    vc = cv2.VideoCapture(0)
    fv = FaceVerification(sessionID, args.test)
    GUI.setSomeVariables(vc, sessionID, fv)
    # vc.set(cv2.CAP_PROP_BUFFERSIZE, 1)
    thread.start()

    start_time = time.time()

    while True:

        frame = get_frame(vc)

        # Query the Model once every 3 seconds.
        end_time = time.time()

        if end_time - start_time < 3.1:  # Uncomment the following two lines for a continuous camera feed.
            # WARNING: A Raspberry Pi 4 model B, 2GB RAM, will not support this change.
            # cv2.imwrite('frame.png', frame)
            # GUI.refresh_frame()
            continue

        start_time = time.time()

        if not GUI.dead and not GUI.frozen:
            if args.azure:
                # Query Azure; include face verification only if it is ready.
                if fv.getStatus():
                    emotions = get_facial_emotion(frame, fv)
                else:
                    emotions = get_facial_emotion(frame, None)
            else:
                # Query our model.
                remove_frame()
                face_isolated = facechop(frame)
                emotions = classify(None, face_isolated)
            if emotions:
                if Playlist.is_playing():
                    # Update global descriptors based on song descriptors and user emotions.
                    current_song = Playlist.get_current_song()
                    current_descriptors = Tracklist.get_song(
                        db, current_song)['descriptors']
                    emotion_list = getEmotionList(emotions)
                    descriptors.update_descriptors(emotion_list,
                                                   current_descriptors)
                    progress_history.update_progress_log(
                        db, sessionID, emotion_list)
                    current_song_score = descriptors.get_song_score(
                        current_descriptors)
                    if args.test:
                        print('Current song score: %s' % current_song_score)

                    # Change song if song score is low.
                    if current_song_score <= THRESHOLD:
                        Playlist.skip_song()

                # Regenerate the plots shown in the GUI from the new emotions.
                remove_frame("progress_plot")
                remove_frame("emotions_plot")
                if args.azure:
                    plotter.write_plot(emotions)
                else:
                    plotter.write_plot({'': emotions})
                GUI.refresh()

        if GUI.dead:
            if args.test:
                print("GUI is closed, shutting down...")
            break

    print("[EMMA]: Closing the camera...")
    close_camera(vc)
    cleanup()
    thread.join()
Example #8
0
def set_volume():
    """
    Reads the current volume via check_volume() and pushes it to both the
    slider widget and the media player so all three stay in sync.
    """
    level = check_volume()
    vol_slider.setValue(level)
    Playlist.set_volume(level)
Example #9
0
def change_volume():
    """
    Called when the slider value changes. Updates the percentage label and
    changes the volume of the player accordingly.
    """
    level = vol_slider.value()
    vol_text.setText("{}%".format(level))
    Playlist.set_volume(level)