Example #1
0
def worker(input_q, output_q, cap_params, frame_processed):
    """Detection worker loop: pull frames from input_q, run hand detection,
    draw bounding boxes in place, and push the annotated frames to output_q.

    Args:
        input_q: queue of BGR frames; a None frame is forwarded untouched.
        output_q: queue receiving annotated frames (or the None sentinel).
        cap_params: dict with keys 'num_hands_detect', 'score_thresh',
            'im_width', 'im_height'.
        frame_processed: frame counter. NOTE(review): rebinding an int
            parameter is invisible to the caller — this count is local only.
    """
    print(">> Loading frozen model for worker.")
    # load_inference_graph() already returns a usable session; the original
    # code overwrote it with a second tf.compat.v1.Session, leaking the first.
    detection_graph, sess = detector_utils.load_inference_graph()
    try:
        while True:
            frame = input_q.get()
            if frame is None:
                # Forward the sentinel so downstream consumers stay in sync.
                output_q.put(frame)
                continue
            # boxes: coordinates of detected hands; scores: confidence levels.
            # If len(boxes) > 1, at least one hand is detected; tune
            # cap_params['score_thresh'] as desired.
            boxes, scores = detector_utils.detect_objects(
                frame, detection_graph, sess)

            # Draws bounding boxes directly on the frame.
            detector_utils.draw_box_on_image(cap_params['num_hands_detect'],
                                             cap_params["score_thresh"],
                                             scores, boxes,
                                             cap_params['im_width'],
                                             cap_params['im_height'], frame)
            # Adds frame annotated with bounding box to queue.
            output_q.put(frame)
            frame_processed += 1
    finally:
        # The original sess.close() sat after `while True` and was
        # unreachable; a finally block guarantees cleanup on exit/exception.
        sess.close()
Example #2
0
def worker_hands(input_q, output_q):
    """Detection worker: pull frames from input_q and push (boxes, scores)
    tuples to output_q.

    Args:
        input_q: queue of frames; None frames produce a (None, None) result.
        output_q: queue receiving (boxes, scores) tuples from the detector.
    """
    # load_inference_graph() already returns a usable session; the original
    # code overwrote it with a second tf.Session, leaking the first.
    detection_graph, sess = detector_utils.load_inference_graph()
    try:
        while True:
            frame = input_q.get()
            if frame is not None:
                boxes, scores = detector_utils.detect_objects(
                    frame, detection_graph, sess)
                output_q.put((boxes, scores))
            else:
                # The original put (boxes, scores) here, which raises
                # NameError if the very first frame is None and otherwise
                # re-emits stale results; emit an explicit empty result.
                output_q.put((None, None))
    finally:
        # The original sess.close() sat after `while True` (unreachable).
        sess.close()
# Fractional padding applied around detected boxes (12% of box size).
box_padding = 0.12

# Path to the EAST text-detector checkpoint (ICDAR2015, ResNet-50, RBOX head).
checkpoint_path = '../extra/model/east_icdar2015_resnet_v1_50_rbox'

#get the start time
start_time = datetime.datetime.now()

#max number of hands we want to detect
num_hands_detect = 1

# Minimum detector confidence for a detection to count.
score_thresh = 0.4

# Timestamp marking when the current timing window started (0 = not started).
timer_start = 0

#load hands detection model
detection_graph, sess1 = detector_utils.load_inference_graph()

# Elapsed-time accumulator for the current step.
timer = 0

#flag indicating whether the patient finish the whole process
finished = 0

# Accumulates per-frame hand detections; consumed later in the pipeline.
hands = []

def main(argv=None):


	global im
	process_start = time.time()

	#location of the instructions
Example #4
0
import numpy as np

import keras
from keras.models import Model, load_model
from keras.preprocessing import image as KerasImage
import cv2

import string
import random
import requests
import os


# Flask blueprint grouping all trash-classification endpoints.
TrashRoutes = Blueprint('TrashRoutes', __name__)

# Hand/object detector graph and TF session, loaded once at import time.
detection_graph, session = load_inference_graph()
# Minimum detector confidence for a detection to count.
MIN_THRESHOLD = 0.5

# Class names indexed by model output position (note: 'trash' appears twice,
# at indices 2 and 5 — presumably intentional; verify against training labels).
prediction_list=['cardboard', 'glass', 'trash', 'paper', 'plastic', 'trash']

# with CustomObjectScope({'relu6': ReLU,'DepthwiseConv2D': DepthwiseConv2D}):
# Keras classifier for trash categories, loaded once at import time.
model=load_model('trained_model.h5')
# Pre-builds the predict function so it is safe to call from request threads
# (TF1-era Keras requirement; removed in TF2).
model._make_predict_function()
# Same label set as prediction_list, keyed by class index.
labels={0: 'cardboard', 1: 'glass', 2: 'trash', 3: 'paper', 4: 'plastic', 5: 'trash'}


@TrashRoutes.route('/trashAll/', methods=["GET"])
def get_all_trash():
    """Return every Trash record as a JSON array of serialized objects."""
    all_records = Trash.query.all()
    serialized = [record.serialize() for record in all_records]
    return jsonify(serialized)
Example #5
0
 def __init__(self):
     # Load the frozen hand-detection graph and its TF session once per
     # instance; both are reused for every subsequent detection call.
     self.detection_graph, self.sess = detector_utils.load_inference_graph()