class PoSDBoS(object):
    """Drowsiness detector main loop.

    Pulls feature windows from the feature extractor's queue, classifies each
    window with a neural network (0 = awake, 1 = drowsy) and forwards runs of
    identical classifications to the DrowsinessMonitor.
    """

    def __init__(self, networkFile=None, demo=False, demoFile=None):
        '''Main class for drowsiness detection

        :param string networkFile: file name of the saved neural network
            (path: "/../../data/<networkFile>.nn"); a new network is created
            from config when omitted
        :param bool demo: if True, read data through DummyDataCollector
        :param string demoFile: data file used in demo mode
        '''
        self.demo = demo
        self.running = True
        self.config = ConfigProvider()
        self._initPoSDBoS()
        self._initNeuralNetwork(networkFile)
        self._initFeatureExtractor(demoFile)
        self.dm = DrowsinessMonitor()
        self.fileUtil = EEGTableFileUtil()

    def _initPoSDBoS(self):
        # Classification state: per-class totals and the current run length.
        posdbosConfig = self.config.getPoSDBoSConfig()
        self.drowsyMinCount = posdbosConfig.get("drowsyMinCount")
        self.awakeMinCount = posdbosConfig.get("awakeMinCount")
        self.classified = [0, 0]  # windows classified as [awake, drowsy]
        self.curClass = 0         # class of the current run
        self.classCount = 0       # length of the current run of equal classes
        self.found = 0            # number of drowsy runs reported

    def _initNeuralNetwork(self, networkFile):
        # Create a fresh network from config, or load a previously saved one.
        nnCreate = self.config.getNNInitConfig()
        self.nn = NeuralNetwork()
        if networkFile is None:  # fix: identity check instead of `== None`
            self.nn.createNew(**nnCreate)
        else:
            self.nn.load(networkFile)

    def _initFeatureExtractor(self, demoFile):
        # The extractor owns the queue this class consumes in run().
        self.demoFile = demoFile
        collector = self._initDataCollector(self.demoFile)
        self.fe = FeatureExtractor(collector)
        self.inputQueue = self.fe.extractQueue

    def _initDataCollector(self, demoFile):
        # Demo mode replays a recorded file; otherwise attach to live EEG.
        collectorConfig = self.config.getCollectorConfig()
        if self.demo:
            return DummyDataCollector(demoFile, **collectorConfig)
        else:
            return EEGDataCollector(None, **collectorConfig)

    def close(self):
        """Signal the run() loop to terminate."""
        self.running = False

    def run(self):
        """Classify queued feature windows until stopped.

        Runs until close() is called or the monitor thread dies, then shuts
        down the feature extractor and the monitor and joins both threads.
        """
        fet = threading.Thread(target=self.fe.start)
        fet.start()
        dmt = threading.Thread(target=self.dm.run)
        dmt.start()
        features = []
        total = 0
        start = time.time()
        c = []
        while self.running and dmt.is_alive():
            try:
                # awake = 0, drowsy = 1
                data = self.inputQueue.get(timeout=1)
                features.append(data)
                clazz = self.nn.activate(data, True)
                c.append([clazz, clazz])
                self.setStatus(clazz)
                total += 1
            except Empty:
                # Queue ran dry for 1s; report throughput so far.
                # Fix: time.time() deltas are seconds, not milliseconds.
                print("needed %.2fs for %d windows" % (time.time() - start, total))
            except KeyboardInterrupt:
                self.close()
            except Exception as e:
                print(str(e))  # fix: e.message is Python-2-only/deprecated
                self.close()
        #self.writeFeature(c)
        self.fe.close()
        fet.join()  # fix: extractor thread was started but never joined
        self.dm.close()
        dmt.join()

    def setStatus(self, clazz):
        """Track runs of identical classifications and notify the monitor.

        The monitor is only informed once a run reaches the configured
        minimum length for its class (drowsyMinCount / awakeMinCount).

        :param int clazz: 0 = awake, 1 = drowsy
        """
        self.classified[clazz] += 1
        if self.curClass == clazz:
            self.classCount += 1
        else:
            # Class changed: start a new run. NOTE(review): the new run is
            # counted from 0, not 1 — kept as-is to preserve behavior.
            self.curClass = clazz
            self.classCount = 0
        info = "class %d row (%s)" % (clazz, str(self.classCount))
        if clazz == 1 and self.classCount >= self.drowsyMinCount:
            self.dm.setStatus(clazz, info)
            self.found += 1
        elif clazz == 0 and self.classCount >= self.awakeMinCount:
            self.dm.setStatus(clazz, info)

    def writeFeature(self, data):
        """Write classified rows to data/classes.csv with a fixed header."""
        filePath = scriptPath + "/../data/" + "classes.csv"
        #filePath = scriptPath + "/../data/" + "drowsy_full_.csv"
        header = ["clazz", "clazz2"]
        #start = 4
        #end = start + len(data[0])/6
        #for field in self.config.getCollectorConfig().get("fields"):
        #    header.extend([str(x) + "Hz" + field for x in range(start, end)])
        self.fileUtil.writeFile(filePath, data, header)
help="number of classes (1001)") args = parser.parse_args() # resnet_v2_101/logits,resnet_v2_101/pool4 => to list of layer names layer_names = args.layer_names.split(",") # Initialize the feature extractor feature_extractor = FeatureExtractor( network_name=args.network_name, checkpoint_path=args.checkpoint, batch_size=args.batch_size, num_classes=args.num_classes, preproc_func_name=args.preproc_func, preproc_threads=args.num_preproc_threads) # Print the network summary, use these layer names for feature extraction #feature_extractor.print_network_summary() # Feature extraction example using a filename queue to feed images feature_dataset = feature_extraction_queue(feature_extractor, args.image_path, layer_names, args.batch_size, args.num_classes) # Write features to disk as HDF5 file utils.write_hdf5(args.out_file, layer_names, feature_dataset) print("Successfully written features to: {}".format(args.out_file)) # Close the threads and close session. feature_extractor.close() print("Finished.")
class Worker:
    """Drives one processing pipeline: reads frames from an input stream,
    runs the TensorFlow-based FeatureExtractor on each frame, and writes
    features/filters (plus a JSON sidecar of objective values) to an output
    stream. Supports resuming from a previously saved state and repeating
    the input stream a configurable number of times.
    """

    def __init__(self, input_stream, output_stream, w=-1, h=-1, fps=-1,
                 frames=-1, force_gray=False, repetitions=1, options=None,
                 resume=False, reset_stream_when_resuming=False):
        # NOTE(review): despite the `options=None` default, options is
        # dereferenced immediately ('rho', 'save_scores_only') — callers must
        # always pass a dict. Kept as-is to preserve the public signature.
        self.input_stream = input_stream
        self.output_stream = output_stream
        self.repetitions = repetitions
        self.__completed_repetitions = 0
        self.__start_time = None
        self.__elapsed_time = None
        self.__rho = options['rho']
        self.steps = 0.0
        self.measured_fps = 0.0
        self.save_scores_only = options['save_scores_only']
        options['stream'] = self.input_stream
        self.input_stream.set_options(w, h, fps, force_gray, frames)
        self.fe = FeatureExtractor(
            w, h, options,
            resume)  # here is the TensorFlow based feature extractor!
        self.blink_steps = []
        if resume:
            out("RESUMING...")
            self.load(reset_stream_when_resuming)

    def close(self):
        """Release the feature extractor's resources."""
        self.fe.close()

    def save(self):
        """Persist the extractor state plus a JSON sidecar recording the
        stream position (step count, last frame number/time, blink steps)."""
        self.fe.save()
        info = {
            'steps': self.steps,
            'frame': self.input_stream.get_last_frame_number(),
            'time': self.input_stream.get_last_frame_time(),
            'blink_steps': self.blink_steps
        }
        # Fix: `open` raises on failure and never returns None, so the old
        # `if f is None or not f or f.closed` guard was dead code; the
        # context manager also guarantees the handle is closed.
        with open(self.fe.save_path + ".info.txt", "w") as f:
            json.dump(info, f, indent=4)

    def load(self, reset_stream=False):
        """Restore extractor + stream state from the JSON sidecar, or reset.

        :param bool reset_stream: if True, restart from step 0 and recreate
            the output folders instead of restoring the saved position.
        """
        if not reset_stream:
            # Fix: context manager instead of manual open/close and the dead
            # `if f is None ...` guard (open raises on failure).
            with open(self.fe.save_path + ".info.txt", "r") as f:
                info = json.load(f)
            self.steps = info['steps']
            self.fe.load(self.steps)
            self.input_stream.get_next(
                sample_only=True)  # ensure that the stream is open
            self.input_stream.set_last_frame_and_time(info['frame'],
                                                      info['time'])
            self.output_stream.set_last_frame(info['frame'])
        else:
            self.steps = 0.0
            self.fe.load(1)
            self.output_stream.create_folders(
                True)  # clearing folders and recreating them
        self.fe.activate_tensor_board()

    def run_step(self):
        """Process a single frame.

        Returns (status, step_time, step_time_without_save,
        step_time_without_save_and_load); status is False when the stream
        is exhausted (after the configured repetitions).
        """
        while True:
            # get time
            status = True
            step_time = time.time()
            if self.__start_time is None:
                self.__start_time = step_time

            # get the frame to process at the next step and the currently
            # needed motion field
            step_load_time = time.time()
            current_img, current_of = self.input_stream.get_next(
                blur_factor=(1.0 - self.__rho))

            # handling repetitions: on stream end, rewind and replay until
            # all repetitions are consumed
            if current_img is None:
                self.__completed_repetitions = self.__completed_repetitions + 1
                if self.__completed_repetitions < self.repetitions:
                    self.input_stream.reset()
                else:
                    break
            else:
                break

        # check if the stream has ended
        if current_img is not None:
            step_load_time = time.time() - step_load_time

            # extracting features
            features, filters, obj_values, obj_comp_values, is_night, next_rho, mi_real_full, motion_full = \
                self.fe.run_step(current_img, current_of)

            # output-info
            if is_night == 1.0:
                light = "night"
            else:
                light = "day"
            out("\t[status=" + light + ", rho=" + str(self.__rho) +
                ", action_cur=" + str(obj_values[1]) +
                ", mi_real_full=" + str(mi_real_full) +
                ", motion_full=" + str(motion_full) +
                ",\n\t mi_real=" + str(obj_values[5]) +
                ", mi=" + str(obj_values[4]) +
                ", ce=" + str(obj_values[2]) +
                ", minus_ge=" + str(obj_values[3]) +
                ", motion=" + str(obj_values[6]) +
                ",\n\t norm_q=" + str(obj_values[7]) +
                ", q'q''=" + str(obj_values[10]) +
                ", norm_q'=" + str(obj_values[8]) + "/" +
                "{0:.2f}".format(self.fe.eps1) +
                ", norm_q''=" + str(obj_values[9]) + "/" +
                "{0:.2f}".format(self.fe.eps2) +
                ", norm_q'''=" + str(obj_values[11]) + "/" +
                "{0:.2f}".format(self.fe.eps3) + "]")

            others = {
                'status': light,
                'rho': float(self.__rho),
                'action_cur': float(obj_values[1]),
                'mi_real_full': float(mi_real_full),
                'motion_full': float(motion_full),
                'mi_real': float(obj_values[5]),
                'mi': float(obj_values[4]),
                'ce': float(obj_values[2]),
                'minus_ge': float(obj_values[3]),
                'motion': float(obj_values[6]),
                'norm_q': float(obj_values[7]),
                'norm_q_mixed': float(obj_values[10]),
                'norm_q_dot': float(obj_values[8]),
                'norm_q_dot_dot': float(obj_values[9]),
                'norm_q_dot_dot_dot': float(obj_values[11]),
                'eps1': self.fe.eps1,
                'eps2': self.fe.eps2,
                'eps3': self.fe.eps3
            }

            # checking errors: raise if ANY entry is NaN or infinite.
            # Fix: the original `not np.isfinite(x).any() or np.isnan(x).any()`
            # only triggered on non-finite values when *every* element was
            # non-finite; `not np.isfinite(x).all()` matches the error message
            # and already covers NaN (isfinite(NaN) is False).
            if not np.isfinite(filters).all():
                raise ValueError("Filters contain NaNs or Infinite!")
            if not np.isfinite(features).all():
                raise ValueError("Feature maps contain NaNs or Infinite!")

            # save output
            step_save_time = time.time()
            if not self.save_scores_only:
                self.output_stream.save_next(current_img, current_of,
                                             features, filters, others)
            step_save_time = time.time() - step_save_time

            # updating rho (print only)
            self.__rho = next_rho

            # stats
            step_time = time.time() - step_time
            self.__elapsed_time = time.time() - self.__start_time
            self.steps = self.steps + 1.0
            self.measured_fps = self.steps / self.__elapsed_time
            if is_night == 1.0:
                self.blink_steps.append(self.steps)

            # saving model (every 1000 steps)
            if int(self.steps) % 1000 == 0:
                self.save()
        else:
            status = False
            step_load_time = time.time() - step_load_time
            step_save_time = 0.0

            # stats
            step_time = time.time() - step_time
            self.__elapsed_time = time.time() - self.__start_time

        return status, step_time, (step_time - step_save_time), \
            (step_time - step_save_time - step_load_time)
class PoSDBoS(object):
    """XOR-demo variant of the drowsiness detector main loop.

    Consumes feature windows from the extractor queue but feeds the neural
    network random (x, y) XOR inputs, forwarding the result to the
    DrowsinessMonitor; collected windows are dumped to CSV on shutdown.
    """

    def __init__(self, networkFile=None, demo=False, demoFile=None):
        '''Main class for drowsiness detection

        :param string networkFile: file name of the saved neural network
            (path: "/../../data/<networkFile>.nn"); a new network is created
            from config when omitted
        :param bool demo: if True, read data through DummyDataCollector
        :param string demoFile: data file used in demo mode
        '''
        self.demo = demo
        self.running = True
        self.config = ConfigProvider()
        self._initNeuralNetwork(networkFile)
        self._initFeatureExtractor(demoFile)
        self.dm = DrowsinessMonitor()
        self.fileUtil = EEGTableFileUtil()

    def _initNeuralNetwork(self, networkFile):
        # Create a fresh network from config, or load a previously saved one.
        nn_conf = self.config.getNeuralNetworkConfig()
        self.nn = NeuralNetwork()
        if networkFile is None:  # fix: identity check instead of `== None`
            self.nn.createNew(nn_conf["nInputs"], nn_conf["nHiddenLayers"],
                              nn_conf["nOutput"], nn_conf["bias"])
        else:
            self.nn.load(networkFile)

    def _initFeatureExtractor(self, demoFile):
        # The extractor owns the queue this class consumes in run().
        collector = self._initDataCollector(demoFile)
        self.fe = FeatureExtractor(collector)
        self.inputQueue = self.fe.extractQueue

    def _initDataCollector(self, demoFile):
        # Demo mode replays a recorded file; otherwise attach to live EEG.
        collectorConfig = self.config.getCollectorConfig()
        if self.demo:
            return DummyDataCollector(demoFile, **collectorConfig)
        else:
            return EEGDataCollector(None, **collectorConfig)

    def close(self):
        """Signal the run() loop to terminate."""
        self.running = False

    def run(self):
        """Main loop: exercise the network with random XOR pairs per queued
        window, then dump features and shut down the worker threads."""
        fet = threading.Thread(target=self.fe.start)
        fet.start()
        dmt = threading.Thread(target=self.dm.run)
        dmt.start()
        features = []
        while self.running and dmt.is_alive():
            try:
                data = self.inputQueue.get(timeout=1)
                features.append(data)
                # Random bits in {0, 1} as XOR test input for the network.
                x = random.randint(1, 10) % 2
                y = random.randint(1, 10) % 2
                data = (x, y)
                clazz = self.nn.activate(data)
                info = "%d XOR %d is %d; queue: %d" % (x, y, clazz, self.inputQueue.qsize())
                self.dm.setStatus(clazz, info)
                #sleep(1)
            except Empty:
                # Queue ran dry for 1s; keep polling until stopped.
                pass
                #if self.demo:
                #    self.close()
            except KeyboardInterrupt:
                self.close()
            except Exception as e:
                print(str(e))  # fix: e.message is Python-2-only/deprecated
                self.close()
        self.writeFeature(features)
        self.fe.close()
        fet.join()  # fix: extractor thread was started but never joined
        self.dm.close()
        dmt.join()

    def _featureHeader(self):
        """Build the CSV header: one column per field/index pair,
        "F3_1" ... "F8_4" (same strings the original loop produced)."""
        return ["%s_%d" % (field, i)
                for field in ["F3", "F4", "F7", "F8"]
                for i in range(1, 5)]

    def writeFeature(self, data):
        """Write collected feature rows to data/test.csv."""
        filePath = scriptPath + "/../data/" + "test.csv"
        self.fileUtil.writeFile(filePath, data, self._featureHeader())