from keras.callbacks import ModelCheckpoint


def best_model_saver(model_name):
    """Return a checkpoint callback that keeps only the best weights (by val_loss)."""
    mkdir('outputs/models/' + model_name)  # project helper: create the directory if missing
    return ModelCheckpoint(
        'outputs/models/' + model_name + '/weights.best.h5',
        monitor='val_loss',
        verbose=1,
        save_best_only=True,
        save_weights_only=False,
        mode='auto',
        period=1)
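# Usage sketch (an assumption, not from the source): shows how the callback
# returned by best_model_saver plugs into Keras' model.fit. The toy model,
# random data, and the 'toy' model name are illustrative placeholders only.
import numpy as np
from keras.models import Sequential
from keras.layers import Dense

model = Sequential([Dense(16, activation='relu', input_shape=(8,)),
                    Dense(1, activation='sigmoid')])
model.compile(optimizer='adam', loss='binary_crossentropy')

x = np.random.rand(100, 8)
y = np.random.randint(0, 2, size=(100, 1))
# The callback rewrites outputs/models/toy/weights.best.h5 whenever val_loss improves.
model.fit(x, y, validation_split=0.2, epochs=5,
          callbacks=[best_model_saver('toy')])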
# requires: import logging, os, signal, traceback
# SynThread, AcrLog, aioqueue, mkdir, log_init, ip_tv_init are project-level helpers.
def _process(kwargs):
    """Worker entry point: wires the sync and ACR-logging threads together in one process."""
    pid = os.getpid()
    mac = kwargs["mac"]
    log = kwargs["log"]
    syn_para = kwargs["syn_para"]
    acr_para = kwargs["acr_para"]
    img_para = kwargs["img_para"]
    ip_tv_para = kwargs["ip_tv"]
    stat_para = kwargs["stat_cfg"]
    # Bounded queue shared between the producer (SynThread) and consumer (AcrLog).
    acr_queue = aioqueue.Queue(max_size=acr_para.queue_cap)
    mkdir(log["path"])
    log_init(log)
    ip_tv_init(ip_tv_para.url, ip_tv_para.local)
    threads = [
        SynThread(
            mac=mac,
            syn_para=syn_para,
            acr_para=acr_para,
            img_para=img_para,
            acr_queue=acr_queue,
            stat_para=stat_para,
        ),
        AcrLog(
            acr_para=acr_para,
            acr_queue=acr_queue,
            stat_para=stat_para,
        ),
    ]

    def _signal(sig, frame):
        # Ask every worker to shut down cleanly on SIGINT/SIGTERM/SIGABRT.
        logging.info(f"{os.getpid()} recv signal {sig} {frame}")
        for w in threads:
            w.close()

    signal.signal(signal.SIGINT, _signal)
    signal.signal(signal.SIGTERM, _signal)
    signal.signal(signal.SIGABRT, _signal)
    logging.info(f"process {pid} start...")
    try:
        for task in threads:
            task.start()
        for task in threads:
            task.join()
    except Exception:
        # Log the traceback; the original bare `except:` hid the failure cause.
        logging.error(f"process {pid} over... {traceback.format_exc()}")
    logging.info(f"process {pid} end...")
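# Hedged launch sketch (an assumption, not from the source): _process is shaped
# like a multiprocessing target taking one kwargs dict per monitored device.
# spawn_workers and the configs argument are hypothetical names for illustration.
import multiprocessing


def spawn_workers(configs):
    # One process per device config; each installs its own signal handlers.
    procs = []
    for cfg in configs:
        p = multiprocessing.Process(target=_process, args=(cfg,))
        p.start()
        procs.append(p)
    for p in procs:
        p.join()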
# requires: import asyncio, base64, datetime, logging, os, traceback; import aiofiles
# MsgField, AcrImgMeta, and mkdir are project-level definitions.
async def _process(self, message):
    """Decode one Kafka image message, archive the image, and append its metadata row."""
    msg = message.value
    try:
        gz_id = msg["gzid"]
        img_name = msg["img_name"]
        device_id = msg["device_id"]
        device_time = msg["create_time"]
        logging.info(
            f"thread:{self.name}, {message.partition},"
            f" {message.offset}, {device_time}, {gz_id}, {device_id}, {img_name}")
        img_code = base64.b64decode(msg["img_code"])
        create_time = datetime.datetime.strptime(msg["create_time"], "%Y-%m-%d %H:%M:%S")
        # Shift back two hours, so images created before 02:00 are filed
        # under the previous day's archive directory.
        save_time = create_time - datetime.timedelta(hours=2)
        save_date = save_time.strftime("%Y-%m-%d")
        dev_dir = os.path.join(self.archive_dir, self.mac, save_date,
                               f"{gz_id}_900000_{device_id}_{save_date}")
        if not os.path.isdir(dev_dir):
            mkdir(dev_dir)
        list_csv = os.path.join(dev_dir, self.img_para.dev_list)
        img_file = os.path.join(dev_dir, img_name)
        # Collect the message fields in MsgField order; missing keys become "".
        meta = [f"{msg.get(key, '')}" for key in MsgField]
        meta[0] = meta[0].rsplit(".", 1)[0]  # drop the sub-second suffix of the timestamp
        meta_line = "\t".join(meta)
        async with aiofiles.open(img_file, 'wb') as wf:
            await wf.write(img_code)
        async with aiofiles.open(list_csv, mode='a') as wf:
            await wf.write(f"{meta_line}\n")
        return AcrImgMeta(date=save_date, meta=meta, path=img_file,
                          topic=message.topic, part=message.partition,
                          offset=message.offset)
    except asyncio.CancelledError:
        logging.info(f"thread:{self.name} process safe stop")
    except Exception:
        logging.error(
            f"thread:{self.name} process msg failed exception {traceback.format_exc()}")
    return None
# requires: import asyncio, logging, os, traceback
# ClientSession is assumed to be aiohttp's; query_broadband and mkdir are project helpers.
async def _worker(self, name, log_name):
    """Consume archived images, classify them, and append results to a per-day log file."""
    logging.info(f"thread:{self.name} {name} start ...")
    cur_fd = None
    try:
        lines = 0
        cur_date = None
        async with ClientSession() as session:
            while self._running:
                msg = await self._chan.get()
                resp = await self._classify(session, msg.path)
                if resp[2] == "UNKNOWN":
                    resp += self._non_ip_tv
                else:
                    resp += query_broadband(resp[2])
                # Roll the output file over whenever the archive date changes.
                if cur_fd is None or cur_date != msg.date:
                    if cur_fd:
                        cur_fd.close()
                    cur_date = msg.date
                    log_path = os.path.join(self.log_dir, cur_date)
                    mkdir(log_path)
                    log_file = os.path.join(log_path, log_name)
                    cur_fd = open(log_file, mode='a')
                    lines = 0
                # Splice the classification response just before the last three columns.
                resp_meta = msg.meta[:-3] + resp + msg.meta[-3:]
                line = ",".join(resp_meta)
                cur_fd.write(f"{line}\n")
                lines += 1  # was never incremented, so the old code flushed on every line
                if lines % 10 == 0:
                    cur_fd.flush()
                async with self._lock:
                    # Track the latest consumed offset per topic/partition for commit.
                    key = f"{msg.topic}_{msg.part}"
                    self.offset[key] = {
                        "topic": msg.topic,
                        "part": msg.part,
                        "offset": msg.offset,
                    }
    except asyncio.CancelledError:
        logging.info(f"thread:{self.name} {name} stop work")
    except Exception:
        logging.error(
            f"thread:{self.name} {name} error found cloud acr {traceback.format_exc()}")
    finally:
        if cur_fd:
            cur_fd.close()
    logging.info(f"thread:{self.name} {name} over ...")
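# Illustration (hypothetical field layout, not from the source): the splice
# above inserts the classification response just before the last three
# metadata columns. The column values below are placeholders.
meta = ['t1', 'dev', 'a', 'b', 'c']   # hypothetical archived columns
resp = ['channel', '0.93']            # hypothetical classify result
assert meta[:-3] + resp + meta[-3:] == ['t1', 'dev', 'channel', '0.93', 'a', 'b', 'c']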
# requires: from sklearn.metrics import classification_report, confusion_matrix
# ecg is assumed to be biosppy.signals.ecg; loader and __draw_to_file are project helpers.
for item in misclassified:
    print(item[2], 'is of class', item[0], 'but was classified as', item[1])

print(classification_report(ytrue, ypred))
matrix = confusion_matrix(ytrue, ypred)
print(matrix)

# Normalise each row to percentages. Cast to float first; writing floats back
# into the integer array confusion_matrix returns would silently truncate them.
matrix = matrix.astype(float)
for row in matrix:
    amax = sum(row)
    if amax > 0:
        for i in range(len(row)):
            row[i] = row[i] * 100.0 / amax
print(matrix)

print("Plotting misclassified")
base_dir = "../outputs/misclassified"
mkdir(base_dir)
misclassified = sorted(misclassified, key=lambda t: t[2])
for item in misclassified:
    print("Saving to " + item[2])
    row = loader.load_data_from_file(item[2])
    [ts, fts, rpeaks, tts, thb, hrts, hr] = ecg.ecg(
        signal=row, sampling_rate=loader.FREQUENCY, show=False)
    __draw_to_file(
        base_dir + "/" + item[2] + "_" + item[0] + "_" + item[1] + ".png",
        tts, thb)
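# Design note (a sketch, not the author's code): the row-percentage loop above
# can be done in one vectorised step with numpy; normalize_rows is a
# hypothetical helper name.
import numpy as np


def normalize_rows(matrix):
    """Return the confusion matrix with each row scaled to percentages."""
    m = np.asarray(matrix, dtype=float)
    row_sums = m.sum(axis=1, keepdims=True)
    # Guard empty rows (classes with no true samples) against division by zero.
    return np.divide(m * 100.0, row_sums, out=np.zeros_like(m), where=row_sums > 0)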
import sys
from datetime import datetime


# system.mkdir and Tee are project helpers; Tee duplicates writes to stdout and a file.
def enable_logging(prefix="log", file_log_enabled=True):
    """Mirror stdout to a timestamped file under logs/."""
    current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
    system.mkdir('logs')
    sys.stdout = Tee('logs/' + prefix + '_' + current_time + '.log', file_log_enabled)
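# Usage sketch (an assumption, not from the source): call enable_logging once at
# program start so all later prints are mirrored to a file; 'train' is illustrative.
if __name__ == '__main__':
    enable_logging(prefix='train')
    print('this goes to the console and to logs/train_<timestamp>.log')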