Example #1
0
def send_with_timer(picam, sender, jpeg_quality, patience_seconds,
                    msg_text='threaded_timer'):
    """Send jpg-compressed camera frames forever, aborting on a stalled hub.

    Each frame read from picam is jpg-encoded and sent via sender inside a
    ThreadedTimer watchdog; if the hub's REP does not arrive within
    patience_seconds, a diagnostic is printed and the program exits.

    Parameters:
        picam: started camera stream exposing read() -> image array.
        sender: imagezmq.ImageSender in REQ/REP mode.
        jpeg_quality (int): 0 to 100, higher is better; 95 is cv2 default.
        patience_seconds: seconds to wait for the hub's reply.
        msg_text (str): text label sent with each jpg.  New, defaulted
            parameter -- existing 4-argument callers are unaffected.
    """
    while True:  # send images as stream until Ctrl-C or until stall out
        image = picam.read()
        ret_code, jpg_buffer = cv2.imencode(
            ".jpg", image, [int(cv2.IMWRITE_JPEG_QUALITY), jpeg_quality])
        try:
            with ThreadedTimer(patience_seconds):
                # BUG FIX: the original passed the undefined name
                # `threaded_timer` here, raising NameError on the first
                # send; use the msg_text parameter instead.
                reply = sender.send_jpg(msg_text, jpg_buffer)
        except ThreadedTimer.Timeout:  # if no timely response from hub
            print('During image send in threaded_timer test,')
            print('No REP received back for', patience_seconds, 'seconds.')
            print('Ending sending program.')
            sys.exit()


# NOTE(review): `connect_to`, `usePiCamera`, `SEND_METHOD_CHECKING` and the
# send_with_* functions referenced below are presumably defined earlier in
# the original file -- confirm before running this fragment standalone.
sender = imagezmq.ImageSender(connect_to=connect_to)
picam = VideoStream(usePiCamera=usePiCamera,
                    resolution=(640, 480),
                    framerate=32).start()
time.sleep(2.0)  # allow camera sensor to warm up
jpeg_quality = 95  # 0 to 100, higher is better quality, 95 is cv2 default
# Select which send loop to run, based on the stall-checking strategy.
if not SEND_METHOD_CHECKING:  # No stall checking
    send_method = send_with_no_checking
    print("Sending with no stall checking.")
    print("...therefore MUST end by Ctrl-C.")
elif SEND_METHOD_CHECKING == 'SIGALRM':  # Unix-signal-based watchdog
    send_method = send_with_sigalrm
    print("Sending with SIGALRM checking.")
elif SEND_METHOD_CHECKING == 'threaded_timer':  # portable threaded watchdog
    send_method = send_with_timer
    print("Sending with threaded_timer checking.")
Example #2
0
import imagezmq
import socket
from time import sleep
from VideoStream import Stream

# Address of the backend server (edit for your deployment).
SERVER_IP = "10.3.12.31"

# Open one REQ/REP connection per server port before touching the camera.
print("Connecting to Server...")
Sender = imagezmq.ImageSender(connect_to=f"tcp://{SERVER_IP}:5580")
Sender2 = imagezmq.ImageSender(connect_to=f"tcp://{SERVER_IP}:5555")
print("Connection Established.")

# Label frames with this host's name; start the camera and let it warm up.
CamName = socket.gethostname()
vStream = Stream(Rpi=True).Start()
sleep(3)

# Stream forever: every captured frame goes to both server sockets.
while True:
    Frame = vStream.ReadFrame()
    Sender.send_image(CamName, Frame)
    Sender2.send_image(CamName, Frame)
Example #3
0
# import the necessary packages
from imutils.video import VideoStream
import imagezmq
import argparse
import socket
import time
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument(
    "-s",
    "--server-ip",
    required=True,
    help="ip address of the server to which the client will connect")
args = vars(ap.parse_args())
# initialize the ImageSender object with the socket address of the server.
# BUG FIX: the original hard-coded an IP here, silently ignoring the
# required --server-ip argument parsed just above.
sender = imagezmq.ImageSender(
    connect_to="tcp://{}:5555".format(args["server_ip"]))

# get the host name, initialize the video stream, and allow the
# camera sensor to warmup
rpiName = socket.gethostname()
vs = VideoStream(usePiCamera=True, resolution=(320, 240)).start()
time.sleep(2.0)

while True:
    # read the frame from the camera and send it to the server
    frame = vs.read()
    sender.send_image(rpiName, frame)
    # BUG FIX: removed `camera.close()` -- `camera` was never defined
    # (NameError on the first iteration), and closing the camera inside
    # the send loop would end the stream after one frame anyway.
from imutils.video import VideoStream
import imagezmq

# IP-camera stream to read from.
path = "rtsp://192.168.1.77:8080//h264_ulaw.sdp"  # change to your IP stream address

# Receiving hub; change to the IP address and port of the server thread.
sender = imagezmq.ImageSender(
    connect_to='tcp://localhost:5566'
)
cam_id = '1'  # label sent with every frame

cap = VideoStream(path)
stream = cap.start()

# Relay every decoded frame to the hub, tagged with this camera's id.
while True:
    frame = stream.read()
    sender.send_image(cam_id, frame)
Example #5
0
import socket
import time
import cv2
import requests as r

# construct the argument parser and parse the arguments
# ap = argparse.ArgumentParser()
# ap.add_argument("-s", "--server-ip", required=True,
# 	help="ip address of the server to which the client will connect")
# ap.add_argument("-c", "--camera-ip", required=True, help="ip address of the camera on the local network to get a feed from")
# args = vars(ap.parse_args())

# initialize the ImageSender object with the socket address of the
# server
# NOTE(review): `imagezmq` is used below but never imported in this
# fragment -- presumably imported in the full file; confirm.
server_ip = "3.14.117.253"
sender = imagezmq.ImageSender(connect_to="tcp://{}:5555".format(server_ip))

# Ask the backend which cameras exist; the response is JSON records with
# 'ip' and 'nickname' keys (as consumed in the loop below).
cameras = r.get("http://3.14.117.253:5000/camera")
camera_ip = []    # camera IP addresses, parallel to camera_name
camera_name = []  # human-readable nicknames

for c in cameras.json():
    camera_ip.append(c['ip'])
    camera_name.append(c['nickname'])

# NOTE: the rest of this loop body is truncated in this excerpt.
while True:
    # read the frame from the camera and send it to the server
    for i, j in zip(camera_ip, camera_name):
        rpiName = j
        # NOTE(review): `i in "10.0.0.170"` is a substring test, not an
        # equality test (e.g. "0.0" would match) -- likely meant `==`.
        if (i in "10.0.0.170"):
            feed = cv2.VideoCapture("http://" + str(i) + ":5000/video_feed")
Example #6
0
from imutils.video import VideoStream
import imagezmq
import socket
import imutils

# RTSP source (credentials redacted in this example).
path = "rtsp://*****:*****@172.16.68.146:554/Streaming/Channels/101/"

cap = VideoStream(path)

sender = imagezmq.ImageSender(connect_to='tcp://localhost:5555')
cam_id = socket.gethostname()  # label frames with this host's name

stream = cap.start()

# Forward frames until the stream runs dry (read() returns None),
# downscaling each one to 720px wide before sending.
while True:
    frame = stream.read()
    if frame is None:
        break  # stream ended or read failed
    sender.send_image(cam_id, imutils.resize(frame, width=720))
Example #7
0
PiCamera continuously to a receiving program on a Mac that will display the
images as a video stream. Images are converted to jpg format before sending.

This program requires that the image receiving program be running first. Brief
test instructions are in that program: test_3_mac_receive_jpg.py.
"""

# import imagezmq from parent directory
import sys
sys.path.insert(0, '../imagezmq')  # imagezmq.py is in ../imagezmq

import socket
import time
import cv2
from imutils.video import VideoStream
import imagezmq

# Either a hostname or a raw IP works as the display computer's address.
sender = imagezmq.ImageSender(connect_to='tcp://jeff-macbook:5555')

rpi_name = socket.gethostname()  # label every frame with this RPi's hostname
picam = VideoStream(usePiCamera=True).start()
time.sleep(2.0)  # give the camera sensor time to warm up
jpeg_quality = 95  # cv2 default; 0-100, higher means better quality

# Compress each frame to jpg on the Pi before sending -- far less network
# traffic than shipping raw arrays.  Stop with Ctrl-C.
while True:
    image = picam.read()
    ret_code, jpg_buffer = cv2.imencode(
        ".jpg", image, [int(cv2.IMWRITE_JPEG_QUALITY), jpeg_quality])
    sender.send_jpg(rpi_name, jpg_buffer)
Example #8
0
# Initialize frame rate calculation
frame_rate_calc = 1
freq = cv2.getTickFrequency()  # ticks per second, used for FPS math

dnum=0  # detection counter -- presumably drone detections; confirm
time_appear_drone = 0
boxthickness = 3   # bounding-box line thickness (px)
linethickness = 2  # tracking-line thickness (px)
# Initialize video stream
#videostream = VideoStream(resolution=(800,600),framerate=30).start()
Auto_flag = True
rectangule_color = (10, 255, 0)  # BGR color for drawn rectangles


# NOTE(review): `args`, `imW`, `imH` and the imports (cv2, socket, time,
# imagezmq, VideoStream) are defined elsewhere in the original file.
sender = imagezmq.ImageSender(connect_to="tcp://{}:5555".format(args.server))
rpi_name = socket.gethostname() # send RPi hostname with each image
picam = VideoStream(resolution=(imW,imH),framerate=30).start()
print(picam.read().shape)
time.sleep(1.0)  # allow camera sensor to warm up
# Derive frame geometry from one sample frame; the medium/center points
# are used by tracking code further down (truncated in this excerpt).
frame1 = picam.read()
rows, cols, _ = frame1.shape
x_medium = int(cols / 2)
x_center = int(cols / 2)
y_medium = int(rows / 2)
y_center = int(rows / 2)

# NOTE: the remainder of this loop body is truncated in this excerpt.
while True:  # send images as stream until Ctrl-C
  # Start timer (for calculating frame rate)
  t1 = cv2.getTickCount()
  frame1 = picam.read()
Example #9
0
import socket
import time
from imutils.video import VideoStream
import imagezmq
import cv2

# Hub that displays the stream (positional arg is connect_to).
sender = imagezmq.ImageSender(connect_to='tcp://Tianyus-MBP:5555')

rpi_name = socket.gethostname()  # label every frame with this hostname

# Capture from the default webcam rather than a PiCamera.
vid = cv2.VideoCapture(0)

time.sleep(2.0)  # let the camera settle
# Show each frame locally, then ship it to the hub.  Ctrl-C to stop.
while True:
    ret, image = vid.read()
    cv2.imshow(rpi_name, image)
    cv2.waitKey(1)
    sender.send_image(rpi_name, image)
# import imagezmq from parent directory
import sys
sys.path.insert(0, '../imagezmq')  # imagezmq.py is in ../imagezmq

import socket
import time
import traceback
import cv2
from imutils.video import VideoStream
import imagezmq
import RPi.GPIO as GPIO

# use either of the formats below to specify address of display computer
# sender = imagezmq.ImageSender(connect_to='tcp://jeff-macbook:5555')
sender = imagezmq.ImageSender(connect_to='tcp://192.168.1.190:5555')

# optionally, turn on the LED area lighting
use_led = False  # set to True or False as needed

if use_led:
    GPIO.setmode(GPIO.BCM)    # Broadcom pin numbering
    GPIO.setup(18, GPIO.OUT)  # LED lighting assumed on BCM pin 18
    GPIO.output(18, True)     # switch the LED on

rpi_name = socket.gethostname()  # send RPi hostname with each image
picam = VideoStream(usePiCamera=True).start()
time.sleep(2.0)  # allow camera sensor to warm up
# NOTE: the remainder of this try block (and its matching except/finally)
# is truncated in this excerpt.
try:
    while True:  # send images as stream until Ctrl-C
        image = picam.read()
Example #11
0
import socket
import time
# from mss.windows import MSS as mss
from PIL import ImageGrab

# Screen-capture / streaming tunables.
jpegQuality = 95  # 0-100, higher is better quality
imageDifference = 50000  # inter-frame change threshold -- units unconfirmed
fpsMax = 15  # frame-rate cap
resolution = 1080  # capture height
monitor = 0  # which monitor to grab
FPS = True  # whether to report FPS

# sct =  mss()

# initialize the ImageSender object with the socket address of the server
# NOTE(review): `imagezmq` and `cv2` are used below but not imported in
# this fragment -- presumably imported elsewhere in the original file.
sender_0 = imagezmq.ImageSender(
    connect_to="tcp://{}:5555".format("192.168.8.10"))
sender_1 = imagezmq.ImageSender(
    connect_to="tcp://{}:5555".format("192.168.8.11"))
sender_2 = imagezmq.ImageSender(
    connect_to="tcp://{}:5555".format("192.168.8.12"))
sender_3 = imagezmq.ImageSender(
    connect_to="tcp://{}:5555".format("192.168.8.13"))

# Start screen grab process

# get the host name
hostName = socket.gethostname()

# initialize counters for FPS reporting (the grab/send loop that uses
# them is truncated in this excerpt)
numFrames = 0
lastFPSTime = cv2.getTickCount()
Example #12
0
    parser.add_argument('--stream', action='store_true')
    parser.add_argument('--camera', action='store_true')
    args = parser.parse_args()

    # Motor control is optional; the star-import pulls in its helpers.
    if args.motor:
        from motor_control import *
        print("motor run")
    
    # GStreamer pipeline for the Jetson CSI camera: capture 1640x1232@30fps,
    # flip, and downscale to 320x240 BGR for OpenCV consumption.
    # CAMSET = "nvarguscamerasrc sensor-id=0 tnr-strength=1 tnr-mode=2 ! video/x-raw(memory:NVMM), width=1640, height=1232, framerate=30/1, format=NV12 ! nvvidconv flip-method=2 ! video/x-raw, width=640, height=480, format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink"
    # CAMSET = "nvarguscamerasrc sensor-id=0 tnr-strength=1 tnr-mode=2 ! video/x-raw(memory:NVMM), width=1280, height=720, framerate=120/1, format=NV12 ! nvvidconv flip-method=2 ! video/x-raw, width=640, height=480, format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink"
    # CAMSET = "nvarguscamerasrc sensor-id=0 tnr-strength=1 tnr-mode=2 ! video/x-raw(memory:NVMM), width=1280, height=720, framerate=120/1, format=NV12 ! nvvidconv flip-method=2 ! video/x-raw, width=640, height=480, format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink"
    CAMSET = "nvarguscamerasrc sensor-id=0 tnr-strength=1 tnr-mode=2 ! video/x-raw(memory:NVMM), width=1640, height=1232, framerate=30/1, format=NV12 ! nvvidconv flip-method=2 ! video/x-raw, width=320, height=240, format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink"

    # PUB/SUB (non-blocking) image stream, bound on all interfaces.
    if args.stream:
        IMAGE_PORT = 5555
        sender = imagezmq.ImageSender("tcp://*:{}".format(IMAGE_PORT), REQ_REP=False)
        jpeg_quality = 60
        rpi_name = socket.gethostname()
        print("Input stream opened")

    # Load HSV thresholds and the lane region-of-interest polygon from the
    # JSON config (the matching except handler is truncated in this excerpt).
    try:
        with open("lane_config.json") as f:
            data = json.load(f)
        lowerY = data["W_hue_min"],data["W_sat_min"],data["W_val_min"]
        upperY = data["W_hue_max"],data["W_sat_max"],data["W_val_max"]
        lane_ROI = np.array([[
            (data["X1_lane"],data["Y1_lane"]),
            (data["X2_lane"],data["Y2_lane"]),
            (data["X3_lane"],data["Y3_lane"]),
            (data["X4_lane"],data["Y4_lane"])
        ]],np.int32)
Example #13
0
This program can turn an LED on and off if needed; assumes BCM pin 18. This
can help with lighting the subject area in front of the PiCamera.
"""

import sys

import socket
import time
import traceback
import cv2
from imutils.video import VideoStream
import imagezmq

# use either of the formats below to specify address of display computer
sender = imagezmq.ImageSender(connect_to='tcp://jeff-macbook:5555')
# sender = imagezmq.ImageSender(connect_to='tcp://192.168.1.190:5555')

rpi_name = socket.gethostname()  # send RPi hostname with each image
picam = VideoStream(usePiCamera=True).start()
time.sleep(2.0)  # allow camera sensor to warm up
jpeg_quality = 95  # 0 to 100, higher is better quality, 95 is cv2 default
# NOTE(review): the context-managed sender below shadows (and never closes)
# the sender created above -- one of the two is probably redundant; confirm.
try:
    with imagezmq.ImageSender(connect_to='tcp://192.168.86.34:5555') as sender:
        while True:  # send images as stream until Ctrl-C
            image = picam.read()
            ret_code, jpg_buffer = cv2.imencode(
                ".jpg", image, [int(cv2.IMWRITE_JPEG_QUALITY), jpeg_quality])
            reply_from_mac = sender.send_jpg(rpi_name, jpg_buffer)
            # above line shows how to capture REP reply text from Mac
# NOTE: the body of this handler is truncated in this excerpt.
except (KeyboardInterrupt, SystemExit):
Example #14
0
import imagezmq
import picamera
import io
import numpy as np
import socket
import time

# Hub that receives the frames.
sender = imagezmq.ImageSender(connect_to="tcp://192.168.50.117:5555")

with picamera.PiCamera() as camera:
    rpiName = socket.gethostname()  # label sent with every frame
    camera.resolution = (1088, 720)
    # Capture RGB frames into a numpy buffer and ship one every half second,
    # printing the hub's reply each time.
    while True:
        frame = np.empty((720, 1088, 3), dtype=np.uint8)
        camera.capture(frame, 'rgb')
        print("Sending...")
        reply = sender.send_image(rpiName, frame)
        print(reply)
        time.sleep(0.5)
Example #15
0
 def __init__(self, address, IP_port, camera_port):
     """Set up the image sender, the camera, and a local TCP control socket.

     address/IP_port form the hub's tcp:// endpoint; camera_port selects
     which camera device to open.  The TCP socket is bound to loopback
     port 1234 (its use is not visible in this excerpt).
     """
     self.sender = imagezmq.ImageSender(
         connect_to="tcp://{}:{}".format(address, IP_port))
     self.camera = camera.Camera(camera_port)
     self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     self.socket.bind(("127.0.0.1", 1234))
import time

# construct the argument parser and parse the arguments
#ap = argparse.ArgumentParser()
#ap.add_argument("-s", "--server-ip", required=True,
#        help="ip address of the server to which the client will connect")
#
#ap.add_argument("-c", "--camera", required=True,
#                help="define which camera to use picam or usbcam")
#args = vars(ap.parse_args())
#
#camera=args['camera']

# initialize the ImageSender object with the socket address of the
# server
# NOTE(review): `imagezmq`, `socket`, and `VideoStream` are used below but
# not imported in this fragment -- presumably imported elsewhere; confirm.
sender = imagezmq.ImageSender(
    connect_to="tcp://{}:5555".format('192.168.1.153'))

# get the host name, initialize the video stream, and allow the
# camera sensor to warmup
rpiName = socket.gethostname()

vs = VideoStream(usePiCamera=True).start()

time.sleep(2.0)

while True:
    # read the frame from the camera and send it to the server
    frame = vs.read()
    sender.send_image(rpiName, frame)

#
test instructions are in that program: test_3_mac_receive_jpg.py.
"""

# import imagezmq from parent directory

import socket
import time
import cv2
from imutils.video import VideoStream
import imagezmq

# Haar cascade used to locate faces in each frame.
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')

# Default ImageSender connects to tcp://localhost:5555.
sender = imagezmq.ImageSender()

rpi_name = 'It\'s Me'  # label sent along with every frame
picam = VideoStream(0).start()
time.sleep(2.0)  # let the camera sensor warm up
jpeg_quality = 95  # cv2 default; 0-100, higher is better quality

# Detect faces, draw a blue box around each, then jpg-compress and send.
while True:  # Ctrl-C to stop
    image = picam.read()
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    for (x, y, w, h) in face_cascade.detectMultiScale(gray, 1.1, 4):
        cv2.rectangle(image, (x, y), (x + w, y + h), (255, 0, 0), 2)
    ret_code, jpg_buffer = cv2.imencode(
        ".jpg", image, [int(cv2.IMWRITE_JPEG_QUALITY), jpeg_quality])
    sender.send_jpg(rpi_name, jpg_buffer)
Example #18
0
def initSender(out_port=5051):
    """Create an ImageSender connected to localhost and return it with a name.

    Parameters:
        out_port (int): TCP port on 127.0.0.1 the receiving hub listens on.

    Returns:
        tuple: (frameSender, senderName) -- the connected ImageSender and
        this machine's hostname, used to label outgoing frames.
    """
    # f-string instead of dated %-formatting; identical resulting address.
    frameSender = imagezmq.ImageSender(connect_to=f"tcp://127.0.0.1:{out_port}")
    senderName = socket.gethostname()

    return frameSender, senderName
Example #19
0
# Date: 2021/04/08
# Description:
# Client 2 code that sends frames to the server using the imagezmq library.
# The PIR sensor code is also shown, but commented out because the sensor
# was not working properly.

# import the necessary packages
from imutils.video import VideoStream
import imagezmq
import socket
import time
import imutils
# import RPi.GPIO as GPIO

# GPIO.setmode(GPIO.BCM)
# PIR_PIN = 4
# GPIO.setup(PIR_PIN, GPIO.IN)

sender = imagezmq.ImageSender(connect_to='tcp://192.168.0.143:5555')

# get the host name, initialize the video stream, and allow the
# camera sensor to warmup
rpiName = socket.gethostname()
vs = VideoStream(usePiCamera=True).start()
time.sleep(2.0)

# NOTE: the send_image call that follows vs.read() is truncated in
# this excerpt.
while True:
    # GPIO.setup(4, GPIO.OUT)
    # if GPIO.input(PIR_PIN):
    #     print("Motion detected")
    # read the frame from the camera and send it to the server
    frame = vs.read()
Example #20
0
 def send_frame(cls, name, adress):
     """Send the cached frame for *name* to a receiver at *adress*.

     Does nothing when no frame is cached for *name* in cls.frameDict.
     """
     if name in cls.frameDict:
         print('send img')
         # Open a fresh connection to the drone's receiver.
         sender = imagezmq.ImageSender("tcp://{}:5001".format(adress))
         # BUG FIX: the original called cls.sender here, discarding the
         # sender created on the line above; use the local connection.
         # NOTE(review): send_image() is called with three arguments --
         # stock imagezmq takes (msg, image); presumably a custom
         # subclass is in play.  Confirm against the receiver.
         mem = sender.send_image(list(cls.Dronedata_Dict[name]), name,
                                 cls.frameDict[name])
Example #21
0
"""test_4_pub.py -- basic send images test using PUB/SUB message pattern.

A simple test program that uses imagezmq to send images to a receiving program
that will display the images.

Brief test instructions are in the receiving program: test_1_pub.py.
"""

import sys
import time
import numpy as np
import cv2
import imagezmq

# Create an image sender in PUB/SUB (non-blocking) mode
sender = imagezmq.ImageSender(connect_to='tcp://*:5555', REQ_REP=False)

image_window_name = 'From Sender'
i = 0
while True:  # press Ctrl-C to stop image sending program
    # Increment a counter and print it's current state to console
    i = i + 1
    print('Sending ' + str(i))

    # Create a simple image
    image = np.zeros((400, 400, 3), dtype='uint8')
    green = (0, 255, 0)
    cv2.rectangle(image, (50, 50), (300, 300), green, 5)

    # Add an incrementing counter to the image
    cv2.putText(image, str(i), (100, 150), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 255, 255), 4)
Example #22
0
# request port to server
# NOTE(review): mqtt, cam_no, broker_ip, mqtt_port, on_message, zmq_port,
# camera_ip, server_ip, im_size, time, cv2, and imagezmq are defined or
# imported elsewhere in the original file.
MQTT = mqtt.Client('camera_' + str(cam_no))
MQTT.connect(broker_ip, mqtt_port)
MQTT.on_message = on_message
MQTT.subscribe('server/camera/{}/port'.format(cam_no))

# Poll the broker until the server assigns a zmq port (set by on_message).
while zmq_port is None:
    MQTT.loop_start()
    MQTT.publish('camera/{}/port_request'.format(cam_no), str(camera_ip))
    print('camera/{}/port_request, {}'.format(cam_no, camera_ip))
    time.sleep(0.5)
MQTT.loop_stop()

# send request
sender = imagezmq.ImageSender(connect_to='tcp://' + server_ip + ':' +
                              str(zmq_port))
cap = cv2.VideoCapture(0)

# Resize and forward webcam frames until 'q' is pressed.
while True:
    ret, frame = cap.read()
    try:
        if ret:  # BUG FIX: idiomatic truthiness test instead of `== True`
            frame = cv2.resize(frame, im_size)
            cv2.waitKey(1)
            sender.send_image('camera_' + str(cam_no), frame)
            if (cv2.waitKey(10) == ord('q')):
                break
    # BUG FIX: the original bare `except:` also swallowed KeyboardInterrupt
    # and SystemExit, making the loop impossible to stop with Ctrl-C;
    # catch Exception only, keeping the best-effort skip behavior.
    except Exception:
        continue

cap.release()
Example #23
0
import socket as sk
import os
from threaded_cam import jetson_csi_camera
import sys
import json

# GStreamer pipeline for the Jetson CSI camera (earlier variants kept
# commented for reference): 1640x1232@30fps, flipped, scaled to 320x240 BGR.
#CAMSET = "nvarguscamerasrc sensor-id=0 tnr-strength=1 tnr-mode=2 ! video/x-raw(memory:NVMM), width=1640, height=1232, framerate=30/1, format=NV12 ! nvvidconv flip-method=2 ! video/x-raw, width=640, height=480, format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink"
# CAMSET = "nvarguscamerasrc sensor-id=0 tnr-strength=1 tnr-mode=2 ! video/x-raw(memory:NVMM), width=1640, height=1232, framerate=30/1, format=NV12 ! nvvidconv flip-method=2 ! video/x-raw, width=640, height=480, format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink"
# CAMSET='nvarguscamerasrc sensor-id=0 ! video/x-raw(memory:NVMM), width=3264, height=2464, framerate=21/1,format=NV12 ! nvvidconv flip-method=2 ! video/x-raw, width=640, height=480, format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink'
# CAMSET = "nvarguscamerasrc sensor-id=0 ! video/x-raw(memory:NVMM), width=1640, height=1232, framerate=30/1, format=NV12 ! nvvidconv flip-method=2 ! video/x-raw, format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink"
CAMSET = "nvarguscamerasrc sensor-id=0 tnr-strength=1 tnr-mode=2 ! video/x-raw(memory:NVMM), width=1640, height=1232, framerate=30/1, format=NV12 ! nvvidconv flip-method=2 ! video/x-raw, width=320, height=240, format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink"

cap = jetson_csi_camera(CAMSET)

# NOTE(review): `imagezmq` and `zmq` are used below but not imported in
# this fragment -- presumably imported elsewhere in the original file.
port = 5555
# PUB/SUB (non-blocking) mode, bound on all interfaces.
sender = imagezmq.ImageSender("tcp://*:{}".format(port), REQ_REP=False)
print("Input stream opened")
JPEG_QUALITY = 30
sender_name = sk.gethostname()

# Creates a SUB socket for receiving configuration messages
context = zmq.Context()
socket = context.socket(zmq.SUB)  # NOTE: shadows the name `socket`
# host = "192.168.1.197"
host = "10.42.0.1"
port = "5001"  # NOTE: rebinds `port` (now a string) for the SUB socket
# Connects to a bound socket
socket.connect("tcp://{}:{}".format(host, port))

# Subscribes to the "configs" topic prefix
socket.subscribe("configs")
import socket
import time
from imutils.video import VideoStream
import imagezmq

# Toggle: True selects the PiCamera at 640x480, False the default camera.
raspberry_pi = True

sender = imagezmq.ImageSender(connect_to="tcp://192.168.8.1:5555")

rpi_name = socket.gethostname()  # label sent with every frame
stream_kwargs = (
    dict(usePiCamera=True, resolution=(640, 480)) if raspberry_pi else {}
)
picam = VideoStream(**stream_kwargs).start()
time.sleep(2.0)  # let the camera sensor warm up

# Stream frames to the hub forever (Ctrl-C to stop).
while True:
    image = picam.read()
    sender.send_image(rpi_name, image)
Example #25
0
# run this program on each RPi to send a labelled image stream
import socket
import time
from imutils.video import VideoStream
import imagezmq

# Hub on this machine (loopback) that collects/displays the frames.
sender = imagezmq.ImageSender(connect_to='tcp://127.0.0.1:5555')
rpi_name = socket.gethostname()  # every frame is labelled with the hostname
picam = VideoStream(usePiCamera=False).start()
time.sleep(2.0)  # give the camera sensor time to warm up
while True:  # Ctrl-C stops the stream
    sender.send_image(rpi_name, picam.read())
Example #26
0
import socket
import time
from imutils.video import VideoStream
import imagezmq
import csi_cam
import cv2

sender = imagezmq.ImageSender(connect_to='tcp://172.16.0.37:5555')

camera = csi_cam.csiCamera()
time.sleep(2.0)  # allow camera sensor to warm up

print("start")

numFrames = 5  # how many frames to send before exiting

# Send a fixed number of frames, polling until the camera yields one.
# NOTE(review): the frame index (an int) is passed as the message label;
# stock imagezmq expects a text message -- confirm the receiver copes.
for i in range(numFrames):
    print('mark:{}'.format(i))
    while True:
        img = camera.getFrame()
        if img is not None:
            break
    sender.send_image(i, img)
Example #27
0
# import the necessary packages
from imutils.video import VideoStream
import imagezmq
import argparse
import socket
import time

# command-line interface: the hub's address is required
ap = argparse.ArgumentParser()
ap.add_argument("-s", "--server-ip", required=True,
                help="ip address of the server to which the client will connect")
args = vars(ap.parse_args())

# connect to the hub on the standard imagezmq port
sender = imagezmq.ImageSender(
    connect_to="tcp://{}:5555".format(args["server_ip"]))

# label frames with this host's name; start the PiCamera and warm it up
rpiName = socket.gethostname()
vs = VideoStream(usePiCamera=True, resolution=(320, 240)).start()
time.sleep(2.0)

# relay frames to the hub forever (Ctrl-C to stop)
while True:
    frame = vs.read()
    sender.send_image(rpiName, frame)
Example #28
0
 def connect_server(self):
     """Open the imagezmq connection to the hub at self.server_host:5555."""
     if self.debug : logging.debug('Server is tcp/' + str(self.server_host))
     connect_to = 'tcp://{}:5555'.format(self.server_host)
     self.image_sender = imagezmq.ImageSender(connect_to=connect_to)
     # BUG FIX: corrected typo in the log message ('inittialized').
     if self.debug: logging.debug('Image sender is initialized.')
from imutils.video import VideoStream
from mlx90614 import MLX90614
from smbus2 import SMBus
import cv2
import imagezmq

# MLX90614 IR temperature sensor on I2C bus 1, default address 0x5A.
bus = SMBus(1)
sensor = MLX90614(bus, address=0x5A)

print("[INFO] creating connection...")
sender = imagezmq.ImageSender(connect_to="tcp://10.0.0.162:50007")
print("[INFO] connecting established...")

# NOTE(review): `socket`, `time`, and `imutils` are used below but not
# imported in this fragment -- presumably imported elsewhere; confirm.
rpiName = socket.gethostname()
print("[INFO] starting video stream...")
vs = VideoStream(src=0).start()
time.sleep(2.0)

jpeg_quality = 80  # jpg compression level, 0-100

# Pair each resized frame with the current object temperature.  The send
# call that consumes msgDict and jpg_buffer is truncated in this excerpt.
while True:
    frame = vs.read()
    frame = imutils.resize(frame, width=400)
    object_temp = sensor.get_object_1()

    msgDict = {
            "rpiName": rpiName,
            "object_temp": object_temp
        }

    ret_code, jpg_buffer = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), jpeg_quality])
import socket
import imagezmq
import requests
import numpy as np
import time
import cv2
from io import BytesIO

# Hub that receives the re-encoded frames.
sender = imagezmq.ImageSender(connect_to='tcp://192.168.10.201:5557')
server_name = socket.gethostname()

# MJPEG stream served by the camera device.
ip_addr = '192.168.10.33'
stream_url = 'http://' + ip_addr + ':81/stream'

res = requests.get(stream_url, stream=True)

# Each sufficiently large chunk should hold one jpg frame: decode it,
# resize to 800x600, and forward.  Bad chunks are logged and skipped.
for chunk in res.iter_content(chunk_size=100000):
    if len(chunk) > 100:
        try:
            start_time = time.time()
            raw = BytesIO(chunk)
            frame = cv2.imdecode(np.frombuffer(raw.read(), np.uint8), 1)
            resized = cv2.resize(frame, (800, 600), interpolation=cv2.INTER_AREA)
            elapsed_ms = (time.time() - start_time) * 1000
            cv2.waitKey(1)
            sender.send_image(server_name, resized)
            cv2.waitKey(10)
            print(f'elapsed_ms : {elapsed_ms}')
        except Exception as e:
            print(e)
            continue