def evaluate(model, search, seq_dict, run_box_head, run_cls_head):
    """Compute FLOPs, Params, and Speed"""
    custom_ops = {nn.MultiheadAttention: get_complexity_MHA}
    # backbone
    macs1, params1 = profile(model, inputs=(search, None, "backbone", False, False),
                             custom_ops=None, verbose=False)
    macs, params = clever_format([macs1, params1], "%.3f")
    print('backbone (search) macs is ', macs)
    print('backbone params is ', params)
    # transformer and head
    macs2, params2 = profile(model, inputs=(None, seq_dict, "transformer", True, True),
                             custom_ops=custom_ops, verbose=False)
    macs, params = clever_format([macs2, params2], "%.3f")
    print('transformer and head macs is ', macs)
    print('transformer and head params is ', params)
    # the whole model
    macs, params = clever_format([macs1 + macs2, params1 + params2], "%.3f")
    print('overall macs is ', macs)
    print('overall params is ', params)

    '''Speed Test'''
    T_w = 10   # warm-up iterations
    T_t = 100  # timed iterations
    print("testing speed ...")
    with torch.no_grad():
        # overall (backbone + transformer/head)
        for i in range(T_w):
            _ = model(search, None, "backbone", run_box_head, run_cls_head)
            _ = model(None, seq_dict, "transformer", run_box_head, run_cls_head)
        start = time.time()
        for i in range(T_t):
            _ = model(search, None, "backbone", run_box_head, run_cls_head)
            _ = model(None, seq_dict, "transformer", run_box_head, run_cls_head)
        end = time.time()
        avg_lat = (end - start) / T_t
        print("The average overall latency is %.2f ms" % (avg_lat * 1000))
        # backbone only
        for i in range(T_w):
            _ = model(search, None, "backbone", run_box_head, run_cls_head)
        start = time.time()
        for i in range(T_t):
            _ = model(search, None, "backbone", run_box_head, run_cls_head)
        end = time.time()
        avg_lat = (end - start) / T_t
        print("The average backbone latency is %.2f ms" % (avg_lat * 1000))
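# The timing loop above measures host wall-clock time only. On a GPU, kernel
# launches return asynchronously, so without an explicit synchronization the
# numbers can reflect launch overhead rather than execution time. A hedged
# sketch of a CUDA-aware variant (the helper name `timed_forward` is
# illustrative and not part of the original code; it assumes `model`, `search`,
# and `seq_dict` already live on the target device):
def timed_forward(model, search, seq_dict, run_box_head, run_cls_head, T_w=10, T_t=100):
    with torch.no_grad():
        for _ in range(T_w):  # warm-up to amortize cuDNN autotuning and allocator setup
            _ = model(search, None, "backbone", run_box_head, run_cls_head)
            _ = model(None, seq_dict, "transformer", run_box_head, run_cls_head)
        if torch.cuda.is_available():
            torch.cuda.synchronize()  # drain pending kernels before starting the clock
        start = time.time()
        for _ in range(T_t):
            _ = model(search, None, "backbone", run_box_head, run_cls_head)
            _ = model(None, seq_dict, "transformer", run_box_head, run_cls_head)
        if torch.cuda.is_available():
            torch.cuda.synchronize()  # wait for the last kernel so the full iteration is counted
        end = time.time()
    return (end - start) / T_t  # average per-iteration latency in seconds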
def evaluate(model, img_x, att_x, q, k, v, key_padding_mask):
    """Compute FLOPs and Params"""
    # backbone
    macs1, params1 = profile(model,
                             inputs=(img_x, att_x, None, None, None, None, "backbone", "search"),
                             custom_ops=None, verbose=False)
    macs, params = clever_format([macs1, params1], "%.3f")
    print('backbone (search) macs is ', macs)
    print('backbone params is ', params)
    # transformer and head
    macs2, params2 = profile(model,
                             inputs=(None, None, q, k, v, key_padding_mask, "transformer", "search"),
                             custom_ops=None, verbose=False)
    macs, params = clever_format([macs2, params2], "%.3f")
    print('transformer and head macs is ', macs)
    print('transformer and head params is ', params)
    # the whole model
    macs, params = clever_format([macs1 + macs2, params1 + params2], "%.3f")
    print('overall macs is ', macs)
    print('overall params is ', params)
def main():
    cfg.merge_from_file('./models/config/config.yaml')
    model = ModelBuilder()
    x = torch.randn(1, 3, 255, 255)    # search region
    zf = torch.randn(1, 3, 127, 127)   # template patch
    model.template(zf)
    macs, params = profile(model, inputs=(x,), verbose=False)
    macs, params = clever_format([macs, params], "%.3f")
    print('overall macs is ', macs)
    print('overall params is ', params)
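# A minimal, hedged entry-point sketch for the script above. The imports are
# assumptions (thop for profile/clever_format, plus whatever modules actually
# provide `cfg` and `ModelBuilder` in this repository) and are left commented
# rather than guessed at concrete paths:
# import torch
# from thop import profile, clever_format
# from <your_model_package>.config import cfg                   # hypothetical path
# from <your_model_package>.model_builder import ModelBuilder   # hypothetical path

if __name__ == '__main__':
    main()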
        'reg': torch.randn(1, 128, 16, 16)
        }

oup = model(x, zf)

# custom_ops = {
#     Conv2dDynamicSamePadding: count_convNd,
#     Conv2dStaticSamePadding: count_convNd,
#     MemoryEfficientSwish: zero_ops,
# }

# compute FLOPs and Params
# the whole model
macs, params = profile(model, inputs=(x, zf), custom_ops=None, verbose=False)
macs, params = clever_format([macs, params], "%.3f")
print('overall macs is ', macs)
print('overall params is ', params)
# backbone
macs, params = profile(backbone, inputs=(x, ), custom_ops=None, verbose=False)
macs, params = clever_format([macs, params], "%.3f")
print('backbone macs is ', macs)
print('backbone params is ', params)
# head
macs, params = profile(head, inputs=(inp, ), verbose=False)
macs, params = clever_format([macs, params], "%.3f")
print('head macs is ', macs)
print('head params is ', params)
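# The commented-out custom_ops dict above is how thop handles layers it cannot
# count by default (e.g. the EfficientNet same-padding convs and Swish). A
# hedged sketch of a counter hook: thop only requires that the hook adds the
# layer's cost to `m.total_ops`; the name `count_swish` and the one-multiply-
# per-element cost model are illustrative assumptions, not part of the original:
def count_swish(m, x, y):
    # x is the tuple of inputs, y the output tensor of the hooked module
    m.total_ops += torch.DoubleTensor([int(y.numel())])

# Usage would mirror the commented block above, e.g.:
# macs, params = profile(model, inputs=(x, zf),
#                        custom_ops={MemoryEfficientSwish: count_swish},
#                        verbose=False)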
def test_clever_format_returns_formatted_number(self):
    nums = 1
    format = "%.2f"

    clever_nums = utils.clever_format(nums, format)

    assert clever_nums == '1.00B'
def test_clever_format_returns_formatted_numbers(self):
    nums = [1, 2]
    format = "%.2f"

    clever_nums = utils.clever_format(nums, format)

    assert clever_nums == ('1.00B', '2.00B')
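# A hedged companion test sketch: clever_format scales values by magnitude and
# appends K/M/G/T suffixes (plain "B" below one thousand, as the two tests
# above rely on). The exact thresholds are assumed from the thop source, so
# treat the expected strings as illustrative rather than authoritative:
def test_clever_format_scales_large_numbers(self):
    nums = [2_000, 3_000_000, 4_000_000_000]
    format = "%.2f"

    clever_nums = utils.clever_format(nums, format)

    assert clever_nums == ('2.00K', '3.00M', '4.00G')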