def deit_small_patch16_224_ctx_product_50_shared_k(pretrained=False, **kwargs):
    """DeiT-Small with contextual Product relative position encoding.

    The RPE is applied to keys only ('k'), with a single head shared
    across attention heads and the [cls] token skipped (skip=1).
    """
    config = get_rpe_config(
        ratio=1.9,
        method=METHOD.PRODUCT,
        mode='ctx',
        shared_head=True,
        skip=1,
        rpe_on='k',
    )
    return deit_small_patch16_224(
        pretrained=pretrained, rpe_config=config, **kwargs)
def deit_small_patch16_224_ctx_quant_51_shared_k(pretrained=False, **kwargs):
    """DeiT-Small with contextual Quantization relative position encoding.

    The RPE is applied to keys only ('k'), with a single head shared
    across attention heads and the [cls] token skipped (skip=1).
    """
    config = get_rpe_config(
        ratio=33,
        method=METHOD.QUANT,
        mode='ctx',
        shared_head=True,
        skip=1,
        rpe_on='k',
    )
    return deit_small_patch16_224(
        pretrained=pretrained, rpe_config=config, **kwargs)
def deit_small_patch16_224_ctx_cross_56_shared_k(pretrained=False, **kwargs):
    """DeiT-Small with contextual Cross relative position encoding.

    The RPE is applied to keys only ('k'), with a single head shared
    across attention heads and the [cls] token skipped (skip=1).
    """
    config = get_rpe_config(
        ratio=20,
        method=METHOD.CROSS,
        mode='ctx',
        shared_head=True,
        skip=1,
        rpe_on='k',
    )
    return deit_small_patch16_224(
        pretrained=pretrained, rpe_config=config, **kwargs)
def deit_small_patch16_224_ctx_euc_20_shared_k(pretrained=False, **kwargs):
    """DeiT-Small with contextual Euclidean relative position encoding.

    The RPE is applied to keys only ('k'), with a single head shared
    across attention heads and the [cls] token skipped (skip=1).
    """
    config = get_rpe_config(
        ratio=20,
        method=METHOD.EUCLIDEAN,
        mode='ctx',
        shared_head=True,
        skip=1,
        rpe_on='k',
    )
    return deit_small_patch16_224(
        pretrained=pretrained, rpe_config=config, **kwargs)