def main(args):
    logger = logging.getLogger(__name__)
    logger.setLevel(args.loglevel)

    try:
        os.remove(DATASET_DB)
    except OSError:
        pass

    with closing(sqlite3.connect(DATASET_DB)) as conn:
        c = conn.cursor()

        # Import metadata:classes
        ref = DATASET['metadata']['classes']
        logger.info(f'Creating table {ref["table"]}')
        c.executescript(sql.create_table_classes(ref["table"]))
        logger.info(f'Importing classes from {ref["local_path"]} to {ref["table"]}')
        with open(ref["local_path"], 'r') as f:
            reader = csv.reader(f)
            for row in reader:
                c.execute(*sql.insert_into_classes(ref["table"], row))

        # Import bboxes
        for ref in DATASET['bboxes'].values():
            logger.info(f'Creating table {ref["table"]}')
            c.executescript(sql.create_table_bboxes(ref["table"]))
            logger.info(f'Importing from {ref["local_path"]} to {ref["table"]}')
            with open(ref["local_path"], 'r') as f:
                reader = csv.reader(f)
                next(reader)
                for row in reader:
                    c.execute(*sql.insert_into_bboxes(ref["table"], row))

        # Import labels
        for ref in DATASET['labels'].values():
            logger.info(f'Creating table {ref["table"]}')
            c.executescript(sql.create_table_labels(ref["table"]))
            logger.info(f'Importing from {ref["local_path"]} to {ref["table"]}')
            with open(ref["local_path"], 'r') as f:
                reader = csv.reader(f)
                next(reader)
                for row in reader:
                    c.execute(*sql.insert_into_labels(ref["table"], row))

        # Import images
        for ref in DATASET['images'].values():
            logger.info(f'Creating table {ref["table"]}')
            c.executescript(sql.create_table_images(ref["table"]))
            logger.info(f'Importing from {ref["local_path"]} to {ref["table"]}')
            with open(ref["local_path"], 'r') as f:
                reader = csv.reader(f)
                next(reader)
                for row in reader:
                    c.execute(*sql.insert_into_images(ref["table"], row))

        conn.commit()
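# Note on the sql helpers (the sql module itself is not part of this excerpt):
# the splat in c.execute(*sql.insert_into_classes(...)) implies each insert
# helper returns a (statement, parameters) pair. A hypothetical sketch of that
# shape only; the column names are illustrative, not taken from the source:
def insert_into_classes(table, row):
    """Assumed return shape: (parameterized statement, parameter sequence)."""
    return (f'INSERT INTO {table} VALUES (?, ?)', row)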
def main(args):
    logger = logging.getLogger(__name__)
    logger.setLevel(args.loglevel)

    for image_path in args.images:
        print(image_path, file=sys.stderr, flush=True)
        cv2.imread(image_path)
def __init__(self, shunt_ohms, i2c, max_expected_amps=None,
             address=__ADDRESS, log_level=logging.ERROR):
    """Construct the class.

    At a minimum pass in the resistance of the shunt resistor and the
    I2C interface to which the sensor is connected.

    Arguments:
    shunt_ohms -- value of shunt resistor in Ohms (mandatory).
    i2c -- an instance of the I2C class from the *machine* module,
        either I2C(1) or I2C(2) (mandatory).
    max_expected_amps -- the maximum expected current in Amps (optional).
    address -- the I2C address of the INA219, defaults to *0x40* (optional).
    log_level -- set to logging.DEBUG to see detailed calibration
        calculations (optional).
    """
    logging.basicConfig(level=log_level)
    self._log = logging.getLogger("ina219")

    self._i2c = i2c
    self._address = address
    self._shunt_ohms = shunt_ohms
    self._max_expected_amps = max_expected_amps
    self._min_device_current_lsb = self._calculate_min_current_lsb()
    self._gain = None
    self._auto_gain_enabled = False
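# A minimal usage sketch, not taken from this excerpt: only __init__ is shown
# above, so the class name INA219 and the configure()/voltage() calls are
# assumptions based on the common INA219 driver API.
import logging
from machine import I2C
from ina219 import INA219  # assumed module and class name

SHUNT_OHMS = 0.1  # resistance of the shunt resistor on the board, in Ohms

i2c = I2C(2)  # per the docstring: pass I2C(1) or I2C(2) from the machine module
ina = INA219(SHUNT_OHMS, i2c, log_level=logging.INFO)
ina.configure()            # assumed configuration call
print(ina.voltage(), "V")  # assumed bus-voltage read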
def main(args):
    logger = logging.getLogger(__name__)
    logger.setLevel(args.loglevel)

    images_dir = os.path.join(args.output_dir, 'images')
    os.makedirs(images_dir, exist_ok=True)
    labels_dir = os.path.join(args.output_dir, 'labels')
    os.makedirs(labels_dir, exist_ok=True)

    logger.info('Extract classes {} from {}'.format(args.extract_class_nos,
                                                    args.images_list))
    with open(args.images_list, 'r') as f:
        for image_path in f.readlines():
            image_path = image_path.rstrip('\n')
            # Derive the label path: .../images/<name>.<ext> -> .../labels/<name>.txt
            label_path = os.path.join(
                os.path.dirname(image_path.replace('/images/', '/labels/')),
                os.path.splitext(os.path.basename(image_path))[0] + '.txt')
            if not os.path.exists(label_path):
                logger.warning('Label file {} not found'.format(label_path))
                continue

            # Keep only rows whose class number was requested, remapping the
            # class id to its index in args.extract_class_nos.
            labels = []
            with open(label_path, 'r') as _f:
                reader = csv.reader(_f, delimiter=" ")
                for row in reader:
                    if int(row[0]) in args.extract_class_nos:
                        labels.append([
                            args.extract_class_nos.index(int(row[0])),
                            *row[1:5]
                        ])

            if labels:
                logger.info('Found labels on {}'.format(
                    os.path.basename(image_path)))
                image_dst_path = os.path.join(images_dir,
                                              os.path.basename(image_path))
                label_dst_path = os.path.join(labels_dir,
                                              os.path.basename(label_path))
                logger.info('Copy image {} -> {}'.format(
                    image_path, image_dst_path))
                shutil.copyfile(image_path, image_dst_path)
                logger.info('Write labels -> {}'.format(label_dst_path))
                with open(label_dst_path, 'w') as _f:
                    writer = csv.writer(_f, delimiter=" ")
                    writer.writerows(labels)
def main(args):
    logger = logging.getLogger(__name__)
    logger.setLevel(args.loglevel)

    os.makedirs(os.path.join(DATASET_DIR, 'org'), exist_ok=True)

    for group, dataset in DATASET.items():
        for subgroup, subdataset in dataset.items():
            logger.info(
                f'[{group}:{subgroup}] '
                f'Downloading {subdataset["remote_url"]} -> {subdataset["local_path"]}'
            )
            response = requests.get(subdataset['remote_url'], stream=True)
            with open(subdataset['local_path'], 'wb') as f:
                for chunk in response.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
STANDARD_DURATION_S = 1
BLINKING_DURATION_S = 60
WAIT_FOR_TUNING_S = 30
WATCHDOG_TIMEOUT_MS = 6 * 60 * 1000
MAX_INACTIVITY_TIME_S = 60 * 60  # min * sec
FIRST_INTERVAL_INACTIVITY_S = MAX_INACTIVITY_TIME_S / 16  # = 225 sec
EXP_BACKOFF_INACTIVITY = 2
OVERSHOOT_DETECTION_PAUSE_S = 60  # sec
RESTART_OFFSET_TIME_S = 24 * 60 * 60 + (
    int.from_bytes(os.urandom(2), "big") % 0x0FFF
)  # restart time = 1 day + (0 .. 4094) seconds

# set up the logging
log = logging.getLogger()
log.info("RESTART IN: {} s".format(RESTART_OFFSET_TIME_S))

# get the reason for reset in readable form
RESET_REASON = translate_reset_cause(machine.reset_cause())


################################################################################
# State Machine

class StateMachine(object):

    def __init__(self):
        self.wdt = machine.WDT(timeout=WATCHDOG_TIMEOUT_MS)
        self.wdt.feed()

        self.state = None
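# Sketch of how the inactivity constants presumably interact (an assumption;
# the state-machine code that consumes them is not shown here): the check
# interval is taken to start at FIRST_INTERVAL_INACTIVITY_S and to be
# multiplied by EXP_BACKOFF_INACTIVITY until capped at MAX_INACTIVITY_TIME_S.
def _inactivity_intervals(first_s=225, factor=2, max_s=3600):
    """Yield the assumed backoff sequence: 225, 450, 900, 1800, 3600 seconds."""
    interval = first_s
    while interval < max_s:
        yield interval
        interval = min(interval * factor, max_s)
    yield max_s

# list(_inactivity_intervals()) -> [225, 450, 900, 1800, 3600]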
def main(args):
    logger = logging.getLogger(__name__)
    logger.setLevel(args.loglevel)

    image_id = args.image_id

    with closing(sqlite3.connect(DATASET_DB)) as conn:
        cur = conn.cursor()

        query, params = sql.select_images(
            bboxes_table=DATASET['bboxes'][args.set]['table'],
            labels_table=DATASET['labels'][args.set]['table'],
            images_table=DATASET['images'][args.set]['table'],
            image_id=image_id,
        )
        cur.execute(query, params)
        row = cur.fetchone()
        if not row:
            logger.warning(f'[{args.set}:{image_id}] Image not found')
            return
        _, org_url, thumb_url, rotation = row

        query, params = sql.select_labels(
            bboxes_table=DATASET['bboxes'][args.set]['table'],
            labels_table=DATASET['labels'][args.set]['table'],
            image_id=image_id,
            class_names=args.classes,
        )
        cur.execute(query, params)
        labels = cur.fetchall()

        # Prefer the thumbnail and fall back to the original image.
        if is_url_available(thumb_url):
            image_url = thumb_url
            image_type = 'thumb'
        elif is_url_available(org_url):
            image_url = org_url
            image_type = 'org'
        else:
            logger.warning(f'[{args.set}:{image_id}] Image unavailable')
            return

        logger.info(
            f'[{args.set}:{image_id}:{image_type}] Downloading image {image_url}')
        response = requests.get(image_url)
        with tempfile.NamedTemporaryFile(delete=False) as f:
            f.write(response.content)
            img_path = f.name

        img_bgr = cv2.imread(img_path)
        if img_bgr is None:
            logger.warning(f'[{args.set}:{image_id}] Invalid image')
            os.remove(img_path)
            return
        img_height, img_width, _ = img_bgr.shape
        os.remove(img_path)

        logger.info(f'[{args.set}:{image_id}:{image_type}] Drawing preview')
        classes = args.classes if args.classes else list(
            set([r[0] for r in labels]))
        for class_name, x, y, width, height in labels:
            logger.debug(
                f'[{args.set}:{image_id}:{image_type}] {x} {y} {width} {height} {rotation}'
            )
            x, y, width, height = rotate_bbox(x, y, width, height, rotation)
            # Convert normalized center/size coordinates to pixel corners.
            left = int((x - width / 2) * img_width)
            right = int((x + width / 2) * img_width)
            top = int((y - height / 2) * img_height)
            bottom = int((y + height / 2) * img_height)
            color = BBOX_COLORS[classes.index(class_name) % len(BBOX_COLORS)]
            cv2.rectangle(img_bgr, (left, top), (right, bottom), color, 1)
            cv2.putText(img_bgr,
                        class_name, (left, top + 12),
                        cv2.FONT_HERSHEY_SIMPLEX,
                        .5,
                        color,
                        lineType=cv2.LINE_AA)

        logger.info(f'[{args.set}:{image_id}:{image_type}] Displaying preview')
        cv2.imshow('preview', img_bgr)
        cv2.waitKey(0)
        cv2.destroyAllWindows()
def main(args):
    logger = logging.getLogger(__name__)
    logger.setLevel(args.loglevel)

    images_dir = os.path.join(IMAGES_DIR, args.set)
    os.makedirs(images_dir, exist_ok=True)
    labels_dir = os.path.join(LABELS_DIR, args.set)
    os.makedirs(labels_dir, exist_ok=True)
    previews_dir = os.path.join(PREVIEWS_DIR, args.set)
    if not args.without_preview:
        os.makedirs(previews_dir, exist_ok=True)

    with closing(sqlite3.connect(DATASET_DB)) as conn:
        cur = conn.cursor()

        if args.classes:
            classes = args.classes
        else:
            cur.execute(sql.select_classes())
            classes = [c[1] for c in cur.fetchall()]

        query, params = sql.select_images(
            bboxes_table=DATASET['bboxes'][args.set]['table'],
            labels_table=DATASET['labels'][args.set]['table'],
            images_table=DATASET['images'][args.set]['table'],
            class_names=args.classes,
            limit=args.limit,
            offset=args.offset)

        for row in conn.execute(query, params):
            image_id, org_url, thumb_url, rotation = row

            # Prefer the thumbnail and fall back to the original image.
            if is_url_available(thumb_url):
                image_path = os.path.join(images_dir,
                                          make_image_name(image_id, thumb_url))
                image_url = thumb_url
                image_type = 'thumb'
            elif is_url_available(org_url):
                image_path = os.path.join(images_dir,
                                          make_image_name(image_id, org_url))
                image_url = org_url
                image_type = 'org'
            else:
                logger.warning(f'[{args.set}:{image_id}] Image unavailable')
                continue

            if os.path.exists(image_path) and not args.overwrite:
                logger.info(
                    f'[{args.set}:{image_id}:{image_type}] {image_path} already exists'
                )
            else:
                logger.info(f'[{args.set}:{image_id}:{image_type}] '
                            f'Downloading image {image_url} -> {image_path}')
                response = requests.get(image_url)
                with open(image_path, 'wb') as f:
                    f.write(response.content)

            img_bgr = cv2.imread(image_path)
            if img_bgr is None:
                logger.warning(f'[{args.set}:{image_id}] Invalid image')
                os.remove(image_path)
                continue

            labels = []
            labels_path = os.path.join(labels_dir, f'{image_id}.txt')
            if os.path.exists(labels_path) and not args.overwrite:
                logger.info(
                    f'[{args.set}:{image_id}:label] {labels_path} already exists'
                )
            else:
                logger.info(
                    f'[{args.set}:{image_id}:label] Writing labels -> {labels_path}'
                )
                _query, _params = sql.select_labels(
                    bboxes_table=DATASET['bboxes'][args.set]['table'],
                    labels_table=DATASET['labels'][args.set]['table'],
                    image_id=image_id,
                    class_names=args.classes,
                )
                cur.execute(_query, _params)
                labels = cur.fetchall()
                with open(labels_path, 'w') as f:
                    for class_name, x, y, width, height in labels:
                        x, y, width, height = rotate_bbox(
                            x, y, width, height, rotation)
                        f.write(
                            f'{classes.index(class_name)} {x} {y} {width} {height}\n'
                        )

            if not args.without_preview:
                preview_path = os.path.join(previews_dir,
                                            os.path.basename(image_path))
                if os.path.exists(preview_path) and not args.overwrite:
                    logger.info(
                        f'[{args.set}:{image_id}:preview] {preview_path} already exists'
                    )
                else:
                    logger.info(
                        f'[{args.set}:{image_id}:preview] Drawing preview -> {preview_path}'
                    )
                    img_height, img_width, _ = img_bgr.shape
                    for class_name, x, y, width, height in labels:
                        logger.debug(f'[{args.set}:{image_id}:{image_type}] '
                                     f'{x} {y} {width} {height} {rotation}')
                        x, y, width, height = rotate_bbox(
                            x, y, width, height, rotation)
                        # Convert normalized center/size coordinates to pixel corners.
                        left = int((x - width / 2) * img_width)
                        right = int((x + width / 2) * img_width)
                        top = int((y - height / 2) * img_height)
                        bottom = int((y + height / 2) * img_height)
                        color = BBOX_COLORS[classes.index(class_name) %
                                            len(BBOX_COLORS)]
                        cv2.rectangle(img_bgr, (left, top), (right, bottom),
                                      color, 1)
                        cv2.putText(img_bgr,
                                    class_name, (left, top + 12),
                                    cv2.FONT_HERSHEY_SIMPLEX,
                                    .5,
                                    color,
                                    lineType=cv2.LINE_AA)
                    cv2.imwrite(preview_path, img_bgr)