# Example No. 1
    def run(self):
        """Configure the darkflow/YOLO pipeline from this object's settings and run it.

        Exits early when no application module has been enabled; otherwise
        builds the darkflow FLAGS object, instantiates the network and
        streams the configured video through it.
        """
        any_module_enabled = (self.__run_counting_cars
                              or self.__run_detect_direction
                              or self.__run_hscd
                              or self.__run_traffic_violation_detection)
        if not any_module_enabled:
            print(
                "please setup individual modules before running the pipeline")
            sys.exit(0)

        print("OUTPUT WILL BE AT: " + str(self.__show_output_path()))
        print("ALERTS FOR THIS RUN WILL BE AT: " +
              str(self.__path_to_alert_poller()))

        # Start from a clean TF 1.x default graph before building the detector.
        tf.reset_default_graph()

        FLAGS = argHandler()
        FLAGS.setDefaults()

        # Detector / tracker configuration, applied as attribute overrides.
        flag_overrides = {
            "demo": self.__video_path,  # video file to use, or "camera" for a live feed
            "model": "darkflow/cfg/yolo.cfg",     # network definition
            "load": "darkflow/bin/yolo.weights",  # pretrained weights
            "threshold": 0.35,  # keep detections with confidence > threshold
            "gpu": 0.85,        # GPU memory fraction to use (0..1); 0 means CPU only
            "track": True,      # enable object tracking
            "trackObj": "car",  # object class to track
            "saveVideo": True,  # write the annotated video to disk
            # cv2 MOG background subtraction as a fallback when YOLO misses;
            # only helps when there are few detections (< 5).
            "BK_MOG": False,
            # Tracking algorithm: deep_sort or sort.
            # (NOTE: deep_sort was trained for people detection only.)
            "tracker": "deep_sort",
            "skip": 0,          # frames to skip between detections (speed-up)
            "csv": False,       # write a CSV log (only when tracking is enabled)
            "display": True,    # show the tracking window
            # Application modules toggled by this pipeline's setup calls.
            "counting_cars": self.__run_counting_cars,
            "direction_detection": self.__run_detect_direction,
            "speed_estimation": self.__run_hscd,
            "traffic_signal_violation_detection":
                self.__run_traffic_violation_detection,
            # Output / metadata passed through to the modules.
            "location_name": self.__location_name,
            "path_to_output": self.__path_to_output,
            "start_time": self.__start_time,
        }
        for option, value in flag_overrides.items():
            setattr(FLAGS, option, value)

        tfnet = TFNet(FLAGS)
        tfnet.camera()
        print("End of Demo.")
# Example No. 2
from darkflow.darkflow.net.build import TFNet

# Build the flag container and start from darkflow's defaults.
FLAGS = argHandler()
FLAGS.setDefaults()

# Alternative protobuf-based model loading (unused here):
#   FLAGS.pbLoad = "tiny-yolo-voc-traffic.pb"
#   FLAGS.metaLoad = "tiny-yolo-voc-traffic.meta"
overrides = {
    "demo": "camera",  # video file to use, or "camera" for a live feed
    "model": "darkflow/cfg/yolo.cfg",     # network definition
    "load": "darkflow/bin/yolo.weights",  # pretrained weights
    "threshold": 0.7,  # keep detections with confidence > threshold
    "gpu": 0.8,        # GPU memory fraction to use (0..1); 0 means CPU only
    "track": False,    # object tracking disabled for this demo
    # Object classes that would be tracked when tracking is enabled.
    "trackObj": [
        'Bicyclist', 'Pedestrian', 'Skateboarder', 'Cart', 'Car', 'Bus'
    ],
    "saveVideo": True,  # write the annotated video to disk
    # cv2 MOG background subtraction as a fallback when YOLO misses;
    # only helps when there are few detections (< 3).
    "BK_MOG": True,
    # Tracking algorithm: deep_sort or sort.
    # (NOTE: deep_sort was trained for people detection only.)
    "tracker": "sort",
    "skip": 0,        # frames to skip between detections (speed-up)
    "csv": False,     # write a CSV log (only when tracking is enabled)
    "display": True,  # show the tracking window
}
for option, value in overrides.items():
    setattr(FLAGS, option, value)

tfnet = TFNet(FLAGS)

tfnet.camera()
exit('Demo stopped, exit.')