Example #1
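# Read the participant from the request's query string and publish a vote
# action on the environment exchange.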
def vote():
    mq = MessageQueue("voting-app")
    participant = request.args.get('participant')
    if participant:
        mq.publish(
            exchange=settings['messaging']['environment'],
            routing_key='action.vote.{}'.format(participant),
            body={
                'participant': participant,
                'last_vote': request.args.get('vote_for', '')
            },
        )
        return 'OK'
    else:
        return 'NOT_OK'
    start_calibration(calibration_id)
    status = wait_for_status('/api/calibrations/' + calibration_id + '/status',
                             'ca_state', ['failed', 'calibrated'])

    if status == 'failed':
        print('Calibration failed, using default calibration instead')
    else:
        print('Calibration successful')

    recording_id = create_recording(participant_id)
    print('Recording started...')
    start_recording(recording_id)

    # Define server
    zmq_socket, zmq_server_addr = create_zmq_server()
    mq = MessageQueue('tobii-sensor')

    # Check which participant
    routing_key_p = '{}.{}'.format(settings['messaging']['new_sensor_tobii'],
                                   participant)
    mq.publish(exchange='sensors',
               routing_key=routing_key_p,
               body={
                   'address': zmq_server_addr,
                   'file_type': 'txt'
               })

    print("Recording in progress...")

    # Init pack
    packed_data = []
Example #3
            while global_runner or q.qsize() != 0:
                data = q.get()
                f.write(data)
                print('{} writes left to do..'.format(q.qsize()))
        print('writer closed: {}'.format(log_file))

    _thread = Thread(target=run, args=(log_file, ))
    _thread.daemon = True
    _thread.start()

    thread = Thread(target=storage_writer, args=(log_file, ))
    thread.daemon = True
    thread.start()


mq = MessageQueue('logger')
mq.bind_queue(exchange='sensors',
              routing_key=listen_to_routing_key,
              callback=callback)

resend_new_sensor_messages.resend_new_sensor_messages()
print('[*] Waiting for messages. To exit press CTRL-C')
try:
    mq.listen()
finally:
    global_runner = False
    for sock in sockets:
        print(sock.closed)
        if not sock.closed:
            sock.close()
    mq.stop()
Example #4
import re
import sys
sys.path.append('../..')
from shared import MessageQueue
import yaml

# Settings
SETTINGS_FILE = '../../settings.yaml'
settings = yaml.safe_load(open(SETTINGS_FILE, 'r').read())

# Get access to tobii live video streaming: rtsp://130.237.67.195:8554/live/eyes or scene


# Process input data: print every incoming message
def callback(_mq, get_shifted_time, routing_key, body):
    #json.load
    print(body)
    print("-------------------------------------------------")


mq = MessageQueue('mocaptobii_processing')

#mq.bind_queue(exchange='pre-processor', routing_key="{}.*".format(settings['messaging']['mocap_processing']), callback=callback)
mq.bind_queue(exchange='pre-processor',
              routing_key="{}.*".format(
                  settings['messaging']['tobii_processing']),
              callback=callback)

print('[*] Waiting for messages. To exit press CTRL+C')
mq.listen()
import zmq
import sys
sys.path.append('..')
from shared import MessageQueue, resend_new_sensor_messages
from threading import Thread

mq = MessageQueue('zmq-server-keeper')

connections = {}


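# Remember the body of every *.new_sensor.* message (it carries the ZMQ
# address of the stream) and drop it again when the matching
# *.disconnected_sensor.* message arrives.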
def callback(_mq, get_shifted_time, routing_key, body):
    splitted_key = routing_key.split('.')
    if len(splitted_key) > 1:
        if splitted_key[1] == 'new_sensor':
            # save the routing key into the dict, e.g. microphone.new_sensor.blue
            connections[routing_key] = body
        elif splitted_key[1] == 'disconnected_sensor':
            # forget the stream once its disconnect message arrives
            new_sensor_key = routing_key.replace('disconnected_sensor',
                                                 'new_sensor')
            if connections.get(new_sensor_key):
                del connections[new_sensor_key]


mq.bind_queue(exchange='sensors',
              routing_key="*.new_sensor.*",
              callback=callback)
mq.bind_queue(exchange='sensors',
              routing_key="*.disconnected_sensor.*",
              callback=callback)
Example #6
import msgpack
import cv2
import sys
import zmq
import subprocess as sp
import numpy as np
sys.path.append('../..')
from shared import create_zmq_server, MessageQueue

zmq_socket, zmq_server_addr = create_zmq_server()

if len(sys.argv) != 2:
    exit('error. python video.py [participant]')
participant = sys.argv[1]

mq = MessageQueue('video-scren_capture-sensor')
mq.publish(exchange='sensors',
           routing_key='video.new_sensor.{}'.format(participant),
           body={
               'address': zmq_server_addr,
               'file_type': 'ffmpeg-video',
               'img_size': {
                   'width': 1280,
                   'height': 720,
                   'channels': 3,
                   'fps': 30,
               }
           })

command = [
    'ffmpeg', '-y', '-f', 'avfoundation', '-framerate', '30', '-i', '0', '-f',
Example #7
import time
import msgpack
sys.path.append('../../')
# from create_zmq_server import create_zmq_server
from GazeSense import GazeSenseSub
from shared import create_zmq_server, MessageQueue

if len(sys.argv) != 2:
    exit('error. python kinect.py [participant]')
participant = sys.argv[1]

KINECT_STREAM_TIMEOUT = 99999.0  # the amount of time data from the Kinect will be sent

zmq_socket, zmq_server_addr = create_zmq_server()

mq = MessageQueue('kinect-sensor')

mq.publish(exchange='sensors',
           routing_key='kinect.new_sensor.{}'.format(participant),
           body={
               'address': zmq_server_addr,
               'file_type': 'txt'
           })


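# Forward each GazeSense sample over the ZMQ socket, packed together with the
# message queue's shifted timestamp.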
def my_callback(data):
    zmq_socket.send(msgpack.packb((data, mq.get_shifted_time())))


gc = GazeSenseSub(callback=my_callback, verbose=True)
Example #8

# Process input data: subscribe to the announced ZMQ video stream and show the frames with OpenCV
def callback(_mq, get_shifted_time, routing_key, body):
    print('connected!', body)

    context = zmq.Context()
    s = context.socket(zmq.SUB)
    s.setsockopt_string(zmq.SUBSCRIBE, '')
    s.connect(body.get('address'))

    while True:
        data = s.recv()
        msgdata, timestamp = msgpack.unpackb(data, use_list=False)
        print(msgdata)
        cv2.imshow('frame', np.array(msgdata))
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

        #ch.basic_publish(exchange='pre-processor', routing_key='asr_incremental.data.{}'.format(participant), body=data)
    s.close()


mq = MessageQueue('facial-features-preprocessor')
mq.bind_queue(exchange='sensors',
              routing_key='video.new_sensor.*',
              callback=callback)

print('[*] Waiting for messages. To exit press CTRL+C')
mq.listen()
                }

                key = settings['messaging']['tobii_processing']
                new_routing_key = "{key}.{participant}".format(
                    key=key, participant=pname)
                _mq.publish(exchange='pre-processor',
                            routing_key=new_routing_key,
                            body=json_data)

                return

            # Send for every participant
            sendjson(pname)
        s.close()

    participant = routing_key.rsplit('.', 1)[1]
    thread = Thread(target=run, args=(participant, _mq))
    thread.daemon = True
    thread.start()


mq = MessageQueue('tobii-preprocessor')

mq.bind_queue(exchange='sensors',
              routing_key="{}.*".format(
                  settings['messaging']['new_sensor_tobii']),
              callback=callback)

print('[*] Waiting for messages. To exit press CTRL+C')
mq.listen()
import wave
import datetime

if len(sys.argv) != 2:
    exit('please only supply sound card name')
device_names_string = sys.argv[1]

FORMAT = pyaudio.paInt16
CHANNELS = 2
RATE = 44100
CHUNK = 2000

zmq_socket_1, zmq_server_addr_1 = create_zmq_server()
zmq_socket_2, zmq_server_addr_2 = create_zmq_server()

mq = MessageQueue('microphone-sensor')

p = pyaudio.PyAudio()
device_index = None
for i in range(p.get_device_count()):
    device = p.get_device_info_by_index(i)
    if device['name'].startswith('[{}]'.format(device_names_string)):
        device_index = i

if device_index is None:
    exit('please connect a proper soundcard')

device_names = device_names_string.split(',')

mq.publish(exchange='sensors',
           routing_key='microphone.new_sensor.{}'.format(device_names[0]),
                'confidence':
                msg['results'][0].get('alternatives',
                                      [{}])[0].get('confidence')
            }
            if msg["results"][0]["final"]:
                self.timer = None
            self.on_message_callback(data)


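# For every announced microphone stream, start a Watson ASR session and
# republish its final and incremental results per participant.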
def callback(_mq, get_shifted_time, routing_key, body):
    participant = routing_key.rsplit('.', 1)[1]
    print('connected {}'.format(routing_key))

    def on_message(data):
        if DEBUG: print(data)
        routing_key = 'asr.data' if data["final"] else 'asr.incremental_data'
        _mq.publish(exchange='pre-processor',
                    routing_key='{}.{}'.format(routing_key, participant),
                    body=data)

    WatsonASR(body.get('address'), recognition_method_url, token, on_message)


mq = MessageQueue('watson-asr-preprocessor')
mq.bind_queue(exchange='sensors',
              routing_key='microphone.new_sensor.*',
              callback=callback)

print('[*] Waiting for messages. To exit press CTRL+C')
mq.listen()
Example #12
                print('done writing.....')
                trying = 0
                break
            except Exception:
                print('failed')
                time.sleep(trying / 2)
                trying -= 1
        print('{} writes left to do..'.format(q.qsize()))
    conn.close()
    data.close()
    print('writer closed: {}'.format(log_file))


thread = Thread(target=storage_writer)
thread.daemon = True
thread.start()

mq = MessageQueue('logger')
mq.bind_queue(exchange='sensors',
              routing_key='*.new_sensor.*',
              callback=callback)

thread2 = Thread(target=mq.listen)
thread2.daemon = True
thread2.start()

input('[*] Waiting for messages. To exit press enter')
running = False
print('ugly hack: now press CTRL-C')
mq.stop()
Example #13
from flask import Flask, render_template, request
import pika
import json
import sys
import random
from threading import Thread
from flask_socketio import SocketIO, send, emit
sys.path.append('..')
from shared import create_zmq_server, MessageQueue

app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)
mq = MessageQueue('wizard')


@socketio.on("say")
def handle_say(json):
    mq.publish(exchange='wizard',
               routing_key='action.say',
               body={'text': json.get('text', '')},
               no_time=True)


@app.route('/say')
def say():
    mq.publish(exchange='wizard',
               routing_key='action.say',
               body={'text': request.args.get('text', '')},
               no_time=True)
    return 'OK'
                        "glove_left": {},
                        "glove_right": {}
                    }

                    key = settings['messaging']['mocap_processing']
                    new_routing_key = "{key}.{participant}".format(
                        key=key, participant=participantname)
                    _mq.publish(exchange='pre-processor',
                                routing_key=new_routing_key,
                                body=json_data)

                    return

                # Send for every participant
                sendjson('white')
                sendjson('pink')
                sendjson('blue')
                sendjson('orange')
                sendjson('brown')
                sendjson('black')
    s.close()


mq = MessageQueue('mocap-preprocessor')
mq.bind_queue(exchange='sensors',
              routing_key=settings['messaging']['new_sensor_mocap'],
              callback=callback)

print('[*] Waiting for messages. To exit press CTRL+C')
mq.listen()
#     cv2.destroyAllWindows()
#
#
#
fourcc = cv2.VideoWriter_fourcc(*'MP4V')

camera = cv2.VideoCapture(camera_id)
width = camera.get(cv2.CAP_PROP_FRAME_WIDTH)  # float
height = camera.get(cv2.CAP_PROP_FRAME_HEIGHT)  # float

session_name = datetime.datetime.now().isoformat().replace('.', '_').replace(
    ':', '_')
out = cv2.VideoWriter('{}.mp4'.format(session_name), fourcc, 30.0,
                      (int(width), int(height)))

mq = MessageQueue('video-webcam-sensor')
mq.publish(exchange='sensors',
           routing_key='video.new_sensor.{}'.format(participant),
           body={
               'address': zmq_server_addr,
               'file_type': 'cv-video',
               'img_size': {
                   'width': width / 2,
                   'height': height / 2,
                   'channels': 3,
                   'fps': 30,
               }
           })
print('[*] Serving at {}. To exit press enter'.format(zmq_server_addr))
try:
    while True:
        """
		Sets or resets features to zero, used after processing of sequence.
		"""
        self.blink = 0
        self.mouth_open = 0
        self.inner_brow_raise = 0
        self.outer_brow_raise = 0
        # TODO: Add highlevel features


participants = defaultdict(FaceProcessor)


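# Collect OpenFace frames per participant and republish the extracted facial
# features on the environment exchange.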
def callback(_mq, get_shifted_time, routing_key, body):
    participant = routing_key.rsplit('.', 1)[1]
    data = participants[participant].collect_frame(body)
    if data:
        data["timestamps"] = body["timestamps"]
        _mq.publish(exchange=settings["messaging"]["environment"],
                    routing_key='faceprocessor.{}'.format(participant),
                    body=data)


mq = MessageQueue('face_processor')
mq.bind_queue(exchange=settings["messaging"]["pre_processing"],
              routing_key='openface.data.*',
              callback=callback)

print('[*] Waiting for messages. To exit press CTRL+C')
mq.listen()
            target_likelihoods[target] = likelihood

    participant = routing_key.rsplit('.', 1)[1]
    data = {
        "participant": participant,
        "dialogue-acts": da_label_targets
    }
    _mq.publish(exchange=env_exchange_name,
                routing_key=settings["messaging"]["dialogue_acts"] + ".{}".format(participant),
                body=data)

if __name__ == "__main__":
    player_names = set(player["name"] for player in settings["players"])
    print("Player usernames to use during keyword detection: %s", player_names, file=sys.stderr)
    phrase_da_labeller = create_phrase_da_labeller("phrase_da_labels.tsv", player_names)

    # Testing -----------------------------------------------
    # input = "I accuse orange because she has been talking a lot but white is a villager"
    # print("Test input: %s" % input)
    # input_tokens = WHITESPACE_PATTERN.split(input)
    # da_labels = phrase_da_labeller.find_phrase_da_labels(input_tokens)
    # print(da_labels)
    # da_targets = DialogueActTargets(da_labels)
    # print(da_targets)
    # -------------------------------------------------------

    mq = MessageQueue('keyword-dialogue-act-processor')
    mq.bind_queue(exchange='pre-processor', routing_key="{}.*".format(settings['messaging']['asr_watson']), callback=callback)
    print('[*] Waiting for messages. To exit press CTRL+C')
    mq.listen()
settings = yaml.safe_load(open(SETTINGS_FILE, 'r').read())


session_name = datetime.datetime.now().isoformat().replace('.', '_').replace(':', '_')

log_path = os.path.join(settings['logging']['asr_path'], session_name)

os.mkdir(log_path)


# Process input data: append each message, msgpack-packed, to a log file named after its routing key
def callback(ch, method, properties, body):
    # participant = routing_key.rsplit('.', 1)[1]
    path = os.path.join(log_path, '{}.txt'.format(method.routing_key))
    with open(path, 'ab') as f:
        f.write(msgpack.packb((method.exchange, method.routing_key, body)))
    print(method.exchange, method.routing_key, body)
    print("-------------------------------------------------")

mq = MessageQueue('asr-logger')

mq.bind_queue(exchange=settings['messaging']['pre_processing'], routing_key="*.*.*", callback_wrapper_func=callback)
mq.bind_queue(exchange=settings['messaging']['sensors'], routing_key="*.*.*", callback_wrapper_func=callback)
mq.bind_queue(exchange=settings['messaging']['wizard'], routing_key="*.*.*", callback_wrapper_func=callback)
mq.bind_queue(exchange=settings['messaging']['environment'], routing_key="*.*.*", callback_wrapper_func=callback)
mq.bind_queue(exchange=settings['messaging']['fatima'], routing_key="*.*.*", callback_wrapper_func=callback)


print('[*] Waiting for messages. To exit press CTRL+C')
mq.listen()
import pika
import msgpack
import thread
import numpy as np
import matplotlib.animation as animation
import matplotlib.pyplot as plt
import matplotlib as mpl
import plotly.plotly as py
import json
import plotly.tools as tls
from numpy.random import random
sys.path.append('../..')
from shared import MessageQueue


mq = MessageQueue('kinect-listener')

# define initial plotting stuff
target_gaze_dic = {}
time_gaze_dict = {}
old_rt_data = ""


def callback(_mq, get_shifted_time, routing_key, body):
    """
    :param _mq:
    :param get_shifted_time:
    :param routing_key:
    :param body: json object containing the address of the 0MQ server and the file type of the sensor
Example #20
import yaml

# Get platform
if len(sys.argv) != 2:
    exit('Error.')
platform = sys.argv[1]

# Print messages
DEBUG = False

# Settings
SETTINGS_FILE = '../../settings.yaml'

# Define server
zmq_socket, zmq_server_addr = create_zmq_server()
mq = MessageQueue('mocap-sensor')

# Establish the connection!
settings = yaml.safe_load(open(SETTINGS_FILE, 'r').read())
mq.publish(exchange='sensors',
           routing_key=settings['messaging']['new_sensor_mocap'],
           body={
               'address': zmq_server_addr,
               'file_type': 'txt'
           })

# Get mocap data stream
if platform == 'mac':
    process = Popen([
        './vicon_mac/ViconDataStreamSDK_CPPTest',
        settings['messaging']['mocap_host']