def __init__(self):
    """Build the main window: calorie-goal label, calorie input line, and action buttons."""
    super().__init__()

    # Central widget with a grid layout.
    win = QWidget(self)
    self.setCentralWidget(win)
    self.setWindowTitle("Calorie Tracker")
    grid = QGridLayout()
    win.setLayout(grid)

    # Controls.
    add_calories_button = QPushButton("Add calories")
    add_calories_button.clicked.connect(self.add_calories_button_clicked)
    self.calories_blank_line = QLineEdit()

    # Read today's consumed calories and the goal once instead of repeatedly
    # (each read_cell/json_load call re-reads the spreadsheet/JSON from disk).
    consumed = read_cell(json_load(), 2)
    goal = get_global("calorie_goal")
    if consumed is not None:
        self.calories_left_label = QLabel("Calories left for Today: " + str(goal - consumed))
    else:
        # No calories recorded yet today: the full goal is still available.
        self.calories_left_label = QLabel("Calories left for Today: " + str(goal))

    reset_button = QPushButton("Start of the day")
    reset_button.clicked.connect(self.start_day)
    note_today_weight_button = QPushButton("Enter today's weight")
    note_today_weight_button.clicked.connect(self.get_weight)

    # Grid placement, one logical group per row.
    row = 0
    grid.addWidget(QLabel("Calorie Goal: " + str(goal)), row, 0, 1, 1)
    row += 1
    grid.addWidget(self.calories_blank_line, row, 0, 1, 1)
    grid.addWidget(add_calories_button, row, 1, 1, 1)
    row += 1
    grid.addWidget(self.calories_left_label, row, 0, 1, 1)
    row += 1
    grid.addWidget(note_today_weight_button, row, 0, 1, 2)
    row += 1
    grid.addWidget(reset_button, row, 1, 1, 1)

    self.show()
def __call__(self, image):
    """Encode `image` with the (lazily built, class-cached) captioning model.

    Returns the transformer-encoder output for the single input image.
    """
    import contextlib

    # Build the model once and cache it on the class so every instance and
    # subsequent call reuses the same weights.
    if not hasattr(self.__class__, "model"):
        model = self.build_model(self.enable_image_captions)
        setattr(self.__class__, "model", model)
    att_embed = self.model["att_embed"]
    encoder = self.model["encoder"]

    # autocast is only usable when this torch build ships torch.cuda.amp and
    # we are on a CUDA device. Catch ImportError specifically — the original
    # bare `except: pass` would also hide unrelated failures.
    use_autocast = False
    try:
        from torch.cuda.amp import autocast
        use_autocast = "cuda" in str(self.device)
    except ImportError:
        pass
    use_autocast = use_autocast and get_global("use_autocast")

    with torch.no_grad():
        # `autocast(enabled=False)` pins this section to full precision even
        # if a caller has an outer autocast region active; when unavailable,
        # run with no extra context. This replaces two byte-identical branches.
        amp_ctx = autocast(enabled=False) if use_autocast else contextlib.nullcontext()
        with amp_ctx:
            img_feature = self.get_img_details(image)[0].to(self.device)
            att_feats = att_embed(img_feature[None])
            att_masks = att_feats.new_ones(att_feats.shape[:2], dtype=torch.long)
            att_masks = att_masks.unsqueeze(-2)
            em = encoder(att_feats, att_masks)
            return em
def add_calories_button_clicked(self):
    """Add the entered calories to today's row and refresh the remaining-calories label."""
    # Read the line edit once instead of twice.
    text = self.calories_blank_line.text()
    if text == '':
        return
    try:
        calories_add = int(text)
    except ValueError:
        # Non-numeric input: ignore it rather than let the exception
        # propagate out of the Qt slot.
        return
    excel_write(calories_add)
    self.calories_blank_line.clear()
    # Call json_load() once instead of twice (it re-reads state from disk).
    # NOTE(review): sibling methods compare json_load() against None, while
    # this check uses != 0 — the original sentinel is kept; confirm which
    # "no row" value json_load actually returns.
    row = json_load()
    if row != 0:
        self.calories_left_label.setText(
            "Calories left for Today: " +
            str(get_global("calorie_goal") - read_cell(row, 2)))
def start_day(self):
    """Start a new day: stamp today's date into the workbook and reset the label."""
    # Only reset the on-screen counter if the date stamp actually succeeded.
    if write_today_date_to_excel():
        self.calories_left_label.setText("Calories left for Today: " + str(get_global("calorie_goal")))
    workbook = load_workbook("diet_diary.xlsx")
    active_sheet = workbook.active
    # Scan for today's row starting from the first data row (row 2,
    # below the header).
    start_row = 2
    check_today_row(active_sheet, start_row)
def excel_write(calories_add):
    """Add `calories_add` to today's total in diet_diary.xlsx and update the surplus column."""
    workbook = load_workbook("diet_diary.xlsx")
    worksheet = workbook.active
    today_row = json_load()
    if today_row is not None:
        # openpyxl returns the same Cell object for repeated cell() calls,
        # so holding one reference is equivalent to re-fetching it each time.
        total_cell = worksheet.cell(today_row, 2)
        if total_cell.value is None:
            total_cell.value = 0
        total_cell.value += calories_add
        # Column 3 records how far over/under the daily goal the total is.
        worksheet.cell(today_row, 3).value = total_cell.value - get_global("calorie_goal")
        workbook.save("diet_diary.xlsx")
def __call__(self, url):
    """Run the (lazily built, class-cached) detectron2 predictor on `url`."""
    # Build the predictor once and cache it on the class so all instances
    # share the loaded model.
    if not hasattr(self.__class__, 'predictor'):
        from detectron2.engine import DefaultPredictor
        predictor = DefaultPredictor(self.cfg)
        setattr(self.__class__, "predictor", predictor)
        print(self.__class__.__name__, ": Loaded Model...")

    # autocast requires a torch build that ships torch.cuda.amp and a CUDA
    # device. Catch ImportError specifically — the original bare `except:`
    # would also hide unrelated failures.
    autocast_supported = False
    try:
        from torch.cuda.amp import autocast
        autocast_supported = "cuda" in str(self.device)
    except ImportError:
        pass

    if autocast_supported:
        # Evaluate the amp flag once instead of twice; kept inside this
        # branch because the unsupported path never called get_global.
        amp_enabled = self.do_autocast and get_global("use_autocast")
        with autocast(enabled=amp_enabled):
            detectron_features = self.doit(url, amp_enabled)
    else:
        detectron_features = self.doit(url, False)
    return detectron_features
def get_detectron_features(self, image_path):
    """Extract 'fc6' features for one image via the maskrcnn-benchmark model.

    Returns (features, info) for the single image in the batch.
    """
    import contextlib
    from maskrcnn_benchmark.structures.image_list import to_image_list

    # Collect garbage before allocating new image tensors.
    _ = gc.collect()
    im, im_scale, im_info = self._image_transform(image_path)
    img_tensor, im_scales = [im], [im_scale]
    current_img_list = to_image_list(img_tensor, size_divisible=32)
    current_img_list = current_img_list.to(self.device)

    with torch.no_grad():
        # autocast is only usable on CUDA-enabled torch builds; catch
        # ImportError specifically instead of a bare except.
        use_autocast = False
        try:
            from torch.cuda.amp import autocast
            use_autocast = "cuda" in str(self.device)
        except ImportError:
            pass
        use_autocast = use_autocast and get_global("use_autocast")

        # `autocast(enabled=False)` pins the forward pass to full precision
        # even inside an outer autocast region; otherwise run with no extra
        # context. This replaces two byte-identical branches.
        amp_ctx = autocast(enabled=False) if use_autocast else contextlib.nullcontext()
        with amp_ctx:
            output = self.detection_model(current_img_list)

    feat_list, info_list = self._process_feature_extraction_v2(
        output, im_scales, [im_info], 'fc6')
    return feat_list[0], info_list[0]
def persistent_caching_fn(fn, name, check_cache_exists=False, cache_dir=None, cache_dirs=None, cache_allow_writes=True, retries=2) -> Callable:
    """Wrap `fn` in a persistent, disk-backed memoizer (diskcache).

    Results are keyed by a sha1 hash of `name`, the function's source (when
    obtainable), its __name__, and the call arguments. Several cache
    directories may be used; reads and writes are retried with backoff, and
    per-function statistics are accumulated in the global "cache_stats".
    Pass `ignore_cache=True` at call time to bypass the cache entirely.
    """
    # Backoff parameters: base wait plus a random jitter per retry.
    wait_time = 0.25
    random_time = 0.25
    # Resolve the cache directories: explicit args win, otherwise fall back
    # to globals. get_global apparently raises when the key is absent —
    # hence the try/except fallbacks here and below.
    try:
        cache_dirs = get_global(
            "cache_dirs") if cache_dirs is None else cache_dirs
    except:
        if cache_dirs is None:
            cache_dir = get_global(
                "cache_dir") if cache_dir is None else cache_dir
            cache_dirs = [cache_dir]
    try:
        cache_allow_writes = get_global("cache_allow_writes")
    except:
        pass
    # Shared stats dict: name -> counter-name -> float, created on first use.
    try:
        cache_stats = get_global("cache_stats")
    except:
        cache_stats = defaultdict(lambda: defaultdict(float))
        set_global("cache_stats", cache_stats)
    cache_stats["count_cache_dirs"] = len(set(cache_dirs))
    cache_stats["cache_dirs"] = set(cache_dirs)
    from diskcache import Cache
    import joblib
    if check_cache_exists:
        # Caller demands a pre-existing cache: verify directory and db file.
        assert os.path.exists(cache_dir) and os.path.isdir(cache_dir)
        cache_file = os.path.join(cache_dir, "cache.db")
        assert os.path.exists(cache_file) and os.path.isfile(cache_file)
    else:
        if os.path.exists(cache_dir):
            assert os.path.isdir(cache_dir)
        else:
            os.mkdir(cache_dir)
    # diskcache tuning; NOTE(review): this `args` is shadowed by the
    # *args of the inner closures below — confirm that is intentional.
    args = dict(eviction_policy='none', sqlite_cache_size=2**16,
                sqlite_mmap_size=2**28, disk_min_file_size=2**18)
    caches = [Cache(cd, **args) for cd in cache_dirs]
    # Randomize cache order so load spreads across directories.
    shuffle(caches)
    # Function fingerprint: prefer name+source+__name__, degrade gracefully
    # when the source (or even __name__) is unavailable.
    try:
        import inspect
        fnh = joblib.hashing.hash(name, 'sha1') + joblib.hashing.hash(
            inspect.getsourcelines(fn)[0], 'sha1') + joblib.hashing.hash(
            fn.__name__, 'sha1')
    except Exception as e:
        try:
            fnh = joblib.hashing.hash(name, 'sha1') + joblib.hashing.hash(
                fn.__name__, 'sha1')
        except Exception as e:
            fnh = joblib.hashing.hash(name, 'sha1')

    def build_hash(*args, **kwargs):
        # Cache key = function fingerprint + hashed positional (and, when
        # present, keyword) arguments.
        hsh = fnh + joblib.hashing.hash(args, 'sha1')
        if len(kwargs) > 0:
            hsh = hsh + joblib.hashing.hash(kwargs, 'sha1')
        return hsh

    def read_hash(hsh):
        # Try every cache, with retries. `kes` marks caches that answered
        # with a definitive KeyError (miss) — those are not asked again.
        ts = time.time()
        kes = [0] * len(caches)
        for retry in range(retries):
            for cidx, cache in enumerate(caches):
                if kes[cidx] == 1:
                    continue
                try:
                    r = cache[hsh]
                    cache_stats[name]["hit"] += 1
                    te = time.time() - ts
                    # Exponential moving average of read latency.
                    cache_stats[name]["read_time"] = 0.9 * cache_stats[name][
                        "read_time"] + 0.1 * te
                    return r
                except KeyError:
                    cache_stats[name]["key_error"] += 1
                    kes[cidx] = 1
                except Exception as e:
                    # Transient failure (e.g. busy sqlite); retry later.
                    cache_stats[name]["read_exception"] += 1
            cache_stats[name]["read_retries"] += 1
            sleep(wait_time * (retry + 1) + random() * random_time)
        # NOTE(review): this extra sleep reuses the last loop value of
        # `retry` after the loop has finished — confirm it is intentional.
        sleep(wait_time * (retry + 1) + random() * random_time)
        cache_stats[name]["read-return-none"] += 1
        # Distinguish "definitely missing somewhere" (caller should compute
        # and write) from "all reads failed transiently" (caller should
        # compute but skip writing).
        if sum(kes) >= 1:
            return "ke", kes
        return None

    def write_hsh(hsh, r, kes):
        # Write the computed result back, but only into the caches that
        # reported a definitive miss (kes[cidx] == 1), with retries.
        ts = time.time()
        if cache_allow_writes:
            for retry in range(retries):
                for cidx, cache in enumerate(caches):
                    if kes[cidx] != 1:
                        continue
                    try:
                        cache[hsh] = r
                        cache_stats[name]["writes"] += 1
                        te = time.time() - ts
                        # Exponential moving average of write latency.
                        cache_stats[name]["write_time"] = 0.9 * cache_stats[
                            name]["write_time"] + 0.1 * te
                        return r
                    except:
                        cache_stats[name]["write_exception"] += 1
                        sleep(wait_time * (retry + 1) + random() * random_time)
                cache_stats[name]["write_retries"] += 1
                sleep(wait_time * (retry + 1) + random() * random_time)

    def cfn(*args, **kwargs):
        # The wrapper returned to the caller.
        ignore_cache = kwargs.pop("ignore_cache", False)
        if ignore_cache:
            r = fn(*args, **kwargs)
            return r
        hsh = build_hash(*args, **kwargs)
        cache_stats[name]["called"] += 1
        r = read_hash(hsh)
        if r is not None:
            # A real cached value — unless it is the ("ke", kes) sentinel.
            # NOTE(review): a genuinely cached tuple whose first element is
            # "ke" would be misclassified here — confirm that cannot occur.
            if not isinstance(r, tuple) or (isinstance(r, tuple)
                                            and r[0] != "ke"):
                return r
        if r is None:
            # All reads failed transiently: recompute but do not write,
            # since no cache reported a definitive miss.
            r = fn(*args, **kwargs)
            cache_stats[name]["re-compute"] += 1
            cache_stats[name]["re-compute-cache-busy-no-write"] += 1
            return r
        kes = r[1]
        r = fn(
            *args, **kwargs
        )  # r is not None and there was key-error so we need to calculate the key and put in cache
        cache_stats[name]["compute"] += 1
        write_hsh(hsh, r, kes)
        return r

    return cfn