Example #1
    def __init__(self, IP = '127.0.0.1' , port = 7000, initial_port_number = 9000):

        """
        Creates a new master object for service discovery

        Parameters
        ----------

        IP : string
            IP value of server

        port : int
            Port used to connect the socket
            
        initial_port_number : int
            Port numbers will be registered starting from the value of this parameter

        """

        # Start master server
        try:
            self.nodes_register = {}
            self.server = nep.server(IP,port)
            self.IP = IP
            self.initial_port_number = initial_port_number

            # Ports configuration
            self.current_port = initial_port_number
            self.topic_register = {}
            self.threads_id = []
            self.brokers = []
            msg_type = "json"         # Message type to listen. "string" or "dict"
            node = nep.node("master_node")                                                                   # Create a new node
            pub_port = 7777
            conf = node.direct(ip = IP, port =  pub_port, mode ="one2many")                # Select the configuration of the publisher
            self.master_pub = node.new_pub("master",msg_type,conf)
            time.sleep(1)
            self.master_pub.publish(self.topic_register)
        except Exception as e:
            print("MASTER ALREADY OPEN: " + str(e))
            sys.exit(0)
Example #2
from utils.inference import load_detection_model
from utils.preprocessor import preprocess_input
import cv2
import nep
import threading
import sys
import time

show_image = 1

try:
    show_image = int(sys.argv[1])
    print("Show image: " + str(show_image))
except:
    pass

node = nep.node('emotion_recognition')
sub_image = node.new_sub('robot_image', 'image')
sub_position = node.new_sub('face_positions', 'json')
pub_emotion = node.new_pub('/blackboard', 'json')
myImage = cv2.imread("x.jpg")  # Temporary placeholder image until the first frame arrives

#classes = ["angry", "fear", "sad", "happy", "surprise", "neutral"]
#object_per = sharo.ObjectPerception(node, classes)


def thread_function(name):  # Get images as soon as possible
    global myImage, sub_image
    while True:
        s, img = sub_image.listen()
        if s:
            myImage = cv2.resize(img, (640, 480), interpolation=cv2.INTER_AREA)
Example #3
import nep
import time
import sys

msg_type = "string"         # Message type to listen. 

node = nep.node("publisher_sample","ROS2")               # Create a new node
pub = node.new_pub("pub_sub_test",msg_type)        # Set the topic and the configuration of the publisher

# Publish a message each second
while True: 
	msg = "hello world"
        print ("sending: " + msg)
        pub.publish(msg) 
        time.sleep(1)
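For reference, here is a minimal sketch of a matching subscriber for the publisher above. It assumes the same "pub_sub_test" topic and string messages, and reuses only calls that appear in the other examples (nep.node with a "ROS2" middleware argument, new_sub, and the non-blocking listen); the node name "subscriber_sample_ros2" is made up for illustration.

import nep

node = nep.node("subscriber_sample_ros2", "ROS2")   # Create a new node (hypothetical name)
sub = node.new_sub("pub_sub_test", "string")        # Subscribe to the same topic as the publisher above

while True:
    s, msg = sub.listen()       # Non blocking socket
    if s:                       # Info available only if s == True
        print(msg)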
Example #4
import nep
import nep_aldebaran
import time

try:
    # robot_info (name, ip, port, middleware) is defined earlier in the original script (truncated here)
    robot_name = robot_info["name"]
    robot_ip = robot_info["ip"]
    robot_port = robot_info["port"]
    middleware = robot_info["middleware"]

    print("Robot name:" + str(robot_name))
    print("Robot IP to connect:" + str(robot_ip))
    print("Robot PORT to connect:" + str(robot_port))
    print("Middleware:" + str(middleware))

except:
    pass

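# Stop any running behavior and disable autonomous life so the robot starts from an idle state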
behavior = nep_aldebaran.BehaviorManager(robot_ip, robot_port)
behavior.onStop()
autonomus = nep_aldebaran.AutonomusLife(robot_ip, robot_port)
autonomus.onStop()
print("Robot ready")

name_ = "reset_robot"
node = nep.node(name_)
pub = node.new_pub("/set_speech", "json")
time.sleep(1)

pub.publish({"action": "stop"})
track = nep_aldebaran.Tracking(robot_ip, robot_port)
track.onStop()
time.sleep(1)

print("Robot reset complete")
Example #5
import nep
import time
import socketio

server_data = None
ip_port = 'http://127.0.0.1:5000'     # Assumed address of the socket.io server; the original value is truncated

sioClient = socketio.Client()

@sioClient.on('connect')
def on_connect():
    print('I connected on ' + ip_port)

@sioClient.on('data')
def on_message(data):
    global server_data
    print('I received a message! ' + data)
    server_data = data


if __name__ == "__main__":

#	ip = raw_input("Enter your ip : ") 
#	port = raw_input("Enter your port : ") 
#	sio.connect('http://' + ip + ':' + port)
	

	sioClient.connect(ip_port)

	node = nep.node('python_client', 'ROS')
	pub = node.new_pub('python_client',msg_type = "string")



	while True:
		# Here is your code ...
		#msg = {'message':'hello'}	# An example of message
		msg = "hello"
		pub.publish(msg)		# Send message

		time.sleep(1)
Example #6
import nep
import time

node = nep.node("surveyor_test")
sur = node.new_surveyor('surveyor_sample', timeout=1000)
while True:
    x = {"node": "action"}
    print("data to send:" + str(x))
    sur.send_json(x)
    s, msg = sur.listen_json()
    if s:
        print("data read" + str(msg))
    else:
        print("respondent not connected")
    time.sleep(1)
Example #7
import nep 
import time 

node = nep.node("client_test")
client = node.new_client("client_server_sample")


while True:
    msg = {"message":"client request"} # Message to send as request
    client.send_info(msg)   # Send request
    print (client.listen_info()) # Wait for server response
    time.sleep(1) # Wait one second
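The matching server for this client is shown in Example #10 below, which answers requests on the same "client_server_sample" service.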
Example #8
import nep
import sys
from naoqi import ALProxy

ip = "127.0.0.1"        # Assumed default robot IP (the top of this snippet is truncated)
resolution = 2          # Assumed default resolution index (kVGA, 640*480)

try:
    ip = sys.argv[1]
    print("resolution: " + sys.argv[2])
    resolution = int(sys.argv[2])
except:
    pass

### Image format
##k960p = 3                # 1280*960
##k4VGA = 3                # 1280*960
##kVGA = 2                 # 640*480
##kQVGA = 1                # 320*240
##kQQVGA = 0               # 160*120

video_service = ALProxy("ALVideoDevice", ip, 9559)

node = nep.node("pepper_image")
pub = node.new_pub("robot_image", "image")

#video_service = session.service("ALVideoDevice")
#colorSpace = vision_definitions.kRGBColorSpace
colorSpace = 11

#colorSpace = vision_definitions.kBGRColorSpace
#colorSpace = vision_definitions.kRGBColorSpace

##img.header.frame_id = self.frame_id
##img.height = image[1]
##img.width = image[0]
##nbLayers = image[2]
##if image[3] == kYUVColorSpace:
##    encoding = "mono8"
Example #9
# This example does not need rosbridge; it publishes directly to RIZE

# Add these lines to your python script
import nep
import time
# (For ROS 1.0 use)
node = nep.node('python_sender', 'ROS')

# (For ROS 2.0 use)
#node = nep.node('python_sender','ROS2')

pub = node.new_pub('/blackboard', 'json')


# Dummy perceptual function
def isHeadTouched():
    return True


# Publish the current state when something is perceived
while True:
    # Here your code that recognize something
    # Example:
    head_touched = isHeadTouched()
    if head_touched:
        msg = {'primitive': 'touched', 'input': {"head": 1}, "robot": "ROS"}
        pub.publish(msg)
        print("send perception ...")
    time.sleep(10)

# In {'primitive':'touched', 'input':{"head":1}, "robot":"ROS"} -- {"head":1} means 100% probability that the head is touched
Example #10
import nep  # Import nep library
node = nep.node("server_test")
server = node.new_server("client_server_sample")

while True:
    msg = {"message": "hello"}  # Message to send as response
    request = server.listen_info()  # Wait for client request
    server.send_info(msg)  # Send server response
    print(request)
Example #11
import cv2
import nep
import sys

# Assumed defaults (the top of this snippet is truncated); they are overridden by the command-line arguments below
id_ = 0
x_resolution = 640
y_resolution = 480
flip = 0

try:
    print(sys.argv[1])
    id_ = int(sys.argv[1])
    print("Camera id:" + str(id_))
    x_resolution = int(sys.argv[2])
    print("X:" + str(x_resolution))
    y_resolution = int(sys.argv[3])
    print("Y:" + str(y_resolution))
    flip = int(sys.argv[4])
    print("flip:" + str(flip))
except:
    pass

node = nep.node('usb_camera')
pub_image = node.new_pub('robot_image', 'image')

video = cv2.VideoCapture(id_)
video.set(cv2.CAP_PROP_FRAME_WIDTH, x_resolution)
video.set(cv2.CAP_PROP_FRAME_HEIGHT, y_resolution)

print('Start Video recording!')

while True:
    success, frame = video.read()
    # Display the resulting image
    if (x_resolution > 640):
        dim = (640, 480)
        frame = cv2.resize(frame, dim, interpolation=cv2.INTER_AREA)
    if (flip == 1):
예제 #12
0
import nep

msg_type = "json"  # Message type to listen. "string" or "json"

node = nep.node("subscriber_sample", "nanomsg")  # Create a new node
conf = node.broker(mode="one2many")                     # Select the configuration of the subscriber
sub = node.new_sub("pub_sub_test_nn", msg_type, conf)   # Set the topic and the configuration of the subscriber

while True:
    s, msg = sub.listen()  # Non blocking socket
    if s:  # Info available only if s == True
        print(str(msg))
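The matching publisher for this nanomsg subscriber is shown in Example #13 below (same broker configuration and "pub_sub_test_nn" topic).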
Example #13
import nep
import time
import sys

msg_type = "json"         # Message type to listen. "string" or "json"

node = nep.node("publisher_sample", "nanomsg")             # Create a new node
conf = node.broker(mode = "one2many")           # Select the configuration of the publisher
pub = node.new_pub("pub_sub_test_nn",msg_type,conf)        # Set the topic and the configuration of the publisher

# Publish a message each second
while True: 
    #  --- String example ---
    if msg_type == "string":
        msg = "hello world"
        print ("sending: " + msg)
        pub.publish(msg) 
        time.sleep(1)
    # --- JSON example ---
    if msg_type == "json":
        msg = {"node":"perception", "primitive":"speech", "input":"add", "robot":"pepper", "parameters":"0"}
        print ("sending: " + str(msg))
        pub.publish(msg) 
        time.sleep(1)
Example #14
else:
    inpWidth = 300  #Width of network's input image
    inpHeight = 300  #Height of network's input image
    classesFile = path_models + "coco_ms.names"

classes = None
with open(classesFile, 'rt') as f:
    classes = f.read().rstrip('\n').split('\n')
print(classes)

# Lock primitive for securely accessing a shared variable
lock = threading.Lock()

# ---------- Nep -----------------

node = nep.node("object_recognition")
sub_img = node.new_sub("robot_image", "image")      # Set the topic and the configuration
object_per = sharo.ObjectPerception(node, classes)
print("usb source")

pub_image = node.new_pub('robot_image_recognition', 'image')
myImage = cv2.imread("x.jpg")  # Temporal image


def thread_function(name):  # Get images as soon as possible
    global myImage
    while True:
        s, img = sub_img.listen()
        if s:
            lock.acquire()
Example #15
import nep
import time

node = nep.node('sender')
conf = node.direct(ip = '127.0.0.1', port =  3000, mode ='one2many')
pub = node.new_pub('test_subcriber','json',conf)

while True:
	# Here is your code ...
	msg = {'message':'hello'}	# An example of message
	pub.publish(msg)		# Send message
	time.sleep(.01)
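The matching subscriber for this direct connection is shown in Example #17 below (same IP, port 3000, and "test_subcriber" topic).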
Example #16
# This example does not need rosbridge; it publishes directly to RIZE

# Add these lines to your python script (Python 2 or 3 is ok)
import nep
import time
node = nep.node('python_sender')
pub = node.new_pub('/blackboard','json')

# Dummy perceptual function
def isHeadTouched():
	return True
# Publish the current state when something is perceived
while True:
	# Here your code that recognize something
	# Example: 
	head_touched = isHeadTouched()
	if head_touched:
		msg = {'primitive':'touched', 'input':{"head":1}, "robot":"ROS"}	
		pub.publish(msg)
		print("send perception ...")
	time.sleep(10)

# In {'primitive':'touched', 'input':{"head":1}, "robot":"ROS"} -- {"head":1} means 100% probability that the head is touched


Example #17
import nep

sub_type = "normal"                 # Type of subscriber. "callback" or "normal"
msg_type = "json"                   # Message type to listen. "string" or "dict"

node = nep.node("subscriber_sample")                                                        # Create a new node
conf = node.direct(ip = "127.0.0.1", port =  3000, mode ="one2many")                         # Select the configuration of the subscriber

def callback(msg):
    print ("Callback get")
    print (msg)

if sub_type == "normal":            # Listen info inside a while loop
    sub = node.new_sub("test_subcriber", msg_type, conf) 
    while True:
        s, msg = sub.listen()       # Non blocking socket
        if s:                       # Info available only if s == True
            print(msg)

elif sub_type == "callback":        # Listen info inside a event loop

    sub = node.new_callback("test_subcriber", msg_type, callback, conf)
    node.spin()


Example #18
import numpy as np
import os, sys
import pickle
import cv2
import nep
import threading
import sharo

show_image = 1
try:
    print (sys.argv[1])
    show_image = int(sys.argv[1])
    print ("Show image: " + show_image)
except:
    pass

node = nep.node('face_detection')
sub_image = node.new_sub('robot_image', 'image')
pub_position = node.new_pub('face_positions', 'json')

perception_face = sharo.BooleanPerception(node, "human_detected", "value", 1, 3)  # --------------- Sharo ------------------
frame = ""
bounding_boxes = ""


def thread_function(name):
    global sub_image, frame
    while True:
        s, img = sub_image.listen()
        if s:
            frame = cv2.resize(img, (640,480), interpolation = cv2.INTER_AREA)