def main_find_qr():
    """Scan all input folders for marks (QR codes) and report detection rates.

    Recreates the log folder from scratch, then for every folder matching
    ``Cfg.inp_folders`` runs ``process_file`` on each file matching
    ``Cfg.inp_fname_mask`` and prints a running found/total rate per file
    and a summary per folder.
    """
    # Start from a clean log folder so old debug images don't mix with this run.
    shutil.rmtree(Cfg.log_folder, ignore_errors=True)
    os.makedirs(Cfg.log_folder, exist_ok=True)
    Debug.set_params(log_folder=Cfg.log_folder, log_image=Cfg.verbose)
    for folder in sorted(glob.glob(f'{Cfg.inp_folders}')):
        if not os.path.isdir(folder):
            continue
        files_cnt = 0
        files_found = 0
        for fname_path in glob.glob(f'{folder}/{Cfg.inp_fname_mask}'):
            Debug.set_log_image_names(fname_path)
            ok_cnt = process_file(fname_path)
            if ok_cnt:
                files_found += 1
            files_cnt += 1
            print(
                f'\t\t{fname_path}: {ok_cnt} marks. Files: '
                f'{files_found}/{files_cnt} rate={100*files_found/files_cnt:.2f}%')
        # Bug fix: a folder may contain no files matching the mask; the original
        # summary line then divided by files_cnt == 0 (ZeroDivisionError).
        if files_cnt:
            print(f'Folder {folder}: {files_found}/{files_cnt} rate={100*files_found/files_cnt:.2f}%')
        else:
            print(f'Folder {folder}: no input files')
def main_find_offset():
    """Compute per-meter coordinate offsets for the images listed in the xy CSV.

    Reads meter positions from ``Cfg.xy_info_file``, runs ``find_offset`` on
    the corresponding image for each row, prints the raw offsets and then
    per-meter avg/min/max/diff statistics. An off_x of 9999 acts as a
    "not found" sentinel (presumably returned by find_offset — TODO confirm)
    and such rows are excluded from the statistics.
    """
    MeterXy = collections.namedtuple('MeterXy', ['file_name', 'meter_id', 'code', 'x', 'y'])
    # Bug fix: the typename must match the bound name ('MeterOff', was 'MeterXy'),
    # otherwise reprs and pickling report the wrong class name.
    MeterOff = collections.namedtuple(
        'MeterOff', ['file_name', 'meter_id', 'code', 'x', 'y', 'off_x', 'off_y'])
    folder = '../tmp/out/images/2019-08-08'  # TODO(review): hard-coded; consider moving to Cfg

    # Read meter_xy.csv; columns: file_name, meter_id, code, x, y (all strings).
    with open(Cfg.xy_info_file, newline='') as csvfile:
        reader = csv.reader(csvfile, delimiter=',', quotechar='"')
        next(reader, None)  # skip headers
        meter_xy_list = [MeterXy(row[0], row[1], row[2], row[3], row[4]) for row in reader]

    # Start from a clean log folder for this run.
    shutil.rmtree(Cfg.log_folder, ignore_errors=True)
    os.makedirs(Cfg.log_folder, exist_ok=True)
    Debug.set_params(log_folder=Cfg.log_folder, log_image=Cfg.verbose)

    offsets = []
    for m in meter_xy_list:
        fname_path = f'{folder}/{m.file_name}.jpg'
        Debug.set_log_image_names(fname_path)
        off = find_offset(fname_path, (m.x, m.y), m.meter_id, m.code)
        offsets.append(MeterOff(m.file_name, m.meter_id, m.code, m.x, m.y, off[0], off[1]))

    offsets = sorted(offsets, key=lambda x: x.meter_id)
    print('offsets:\n\t',
          '\n\t'.join([f'{m.meter_id} : {m.file_name} : ({m.off_x:.5f},{m.off_y:.5f}) '
                       f'xy:({m.x},{m.y})'
                       for m in offsets if m.off_x != 9999.]), sep='')

    # Per-meter statistics over the valid (non-sentinel) offsets.
    for meter_id in sorted(list(set([off.meter_id for off in offsets])), key=lambda x: int(x)):
        lst = [(m.off_x, m.off_y) for m in offsets if m.meter_id == meter_id and m.off_x != 9999]
        if not lst:
            # Bug fix: statistics.mean raises StatisticsError on an empty
            # sequence; this happened when every offset for a meter was 9999.
            print(f'meter_id={meter_id}: no valid offsets')
            continue
        xy = [[l[i] for l in lst] for i in [0, 1]]  # transpose: xy[0] = xs, xy[1] = ys
        avg = (statistics.mean(xy[0]), statistics.mean(xy[1]))
        min_v = (min(xy[0]), min(xy[1]))
        max_v = (max(xy[0]), max(xy[1]))
        diff = (max_v[0] - min_v[0], max_v[1] - min_v[1])
        print(f'meter_id={meter_id} '
              f'avg=({avg[0]: .5f},{avg[1]: .5f}) '
              f'min=({min_v[0]: .5f},{min_v[1]: .5f}) '
              f'max=({max_v[0]: .5f},{max_v[1]: .5f})'
              f'diff=({diff[0]: .5f},{diff[1]: .5f})'
              )
def main():
    """Main metering service loop.

    Watches Cfg.inp_folder for FLIR input files, takes readings from each,
    removes processed inputs, and triggers Analyzer.run() when the equipment
    group reports it is ready. Runs until interrupted with Ctrl-C; the Db
    connection is closed on every exit path via the ``finally`` clause.
    """
    logger.debug('metering - start')
    Debug.set_params(log_folder=Cfg.log_folder, log_image=Cfg.log_image)
    if not os.path.isdir(Cfg.inp_folder):
        logger.error(f'Input folder {Cfg.inp_folder} does not exist.')
        return
    try:
        while True:
            if Cfg.need_sync:
                sync_meterings()
            files_list = sorted(
                glob.glob(f'{Cfg.inp_folder}/{Cfg.inp_fname_mask}'))
            if not len(files_list):
                # Idle: no input files. Give the analyzer a chance to run on
                # the 'empty_dir' event, then sleep and poll again.
                if _GroupEquip.ready_to_analyze(event='empty_dir'):
                    Analyzer.run()
                logger.debug(f'timeout {Cfg.inp_timeout} sec')
                time.sleep(Cfg.inp_timeout)  # sec
                continue
            start = datetime.datetime.now()
            for (files_cnt, fname_path_flir) in enumerate(files_list, 1):
                cnt, meter_ids = take_readings(fname_path_flir)
                # NOTE: input files are removed regardless of whether any
                # readings were obtained from them.
                remove_input_files(fname_path_flir)
                if not cnt or not len(meter_ids):
                    continue  # skip it, no mark/equips/readings here
                logger.info(
                    f'{files_cnt} of {len(files_list)}: {fname_path_flir} readings:{cnt} '
                    f'equip:{list(set([Db.meter_to_equip(m) for m in meter_ids]))}'
                )
            # files_cnt holds the total file count after the loop (enumerate started at 1).
            seconds = (datetime.datetime.now() - start).seconds
            print(
                f'Processed {files_cnt} files in {seconds:.0f}s, ({seconds/files_cnt:.0f} sec/file)'
            )
            Db.close()
    except KeyboardInterrupt:
        logger.info('metering is interrupted by user')
        # On manual shutdown, run the analyzer one last time if it is ready.
        if _GroupEquip.ready_to_analyze(event='the_end'):
            Analyzer.run()
    finally:
        Db.close()