def __init__(self, v_dim, q_dim, num_hid, norm='weight', act='LeakyReLU', dropout=0.3):
    """Build the two-branch gated attention MLP.

    Args:
        v_dim: dimensionality of the visual feature vector.
        q_dim: dimensionality of the question/text feature vector.
        num_hid: hidden size of the projection layers.
        norm: normalization scheme name. NOTE(review): accepted but currently
            unused here — presumably consumed by FCNet in other variants; confirm.
        act: activation name. NOTE(review): accepted but currently unused.
        dropout: dropout probability applied after each FCNet layer.
    """
    super(Att_PD_layer2, self).__init__()
    # Fix: `dropout` was accepted but ignored (0.3 was hard-coded); wire the
    # parameter through. The default of 0.3 preserves existing behavior.
    self.nonlinear_1 = FCNet([v_dim + q_dim, num_hid], output_dropout=dropout)
    self.nonlinear_2 = FCNet([num_hid, num_hid], output_dropout=dropout)
    # Sigmoid-gated branches mirror the nonlinear branches' shapes.
    self.nonlinear_gate_1 = FCNet_sigmoid([v_dim + q_dim, num_hid])
    self.nonlinear_gate_2 = FCNet_sigmoid([num_hid, num_hid])
    # Final scalar attention logit per position.
    self.linear = nn.Linear(num_hid, 1)
def __init__(self, v_dim, q_dim, num_hid, norm='weight', act='LeakyReLU', dropout=0.3):
    """Build separate visual/question projections plus a fusion MLP.

    Args:
        v_dim: dimensionality of the visual feature vector.
        q_dim: dimensionality of the question/text feature vector.
        num_hid: hidden size shared by both projections.
        norm: normalization scheme name. NOTE(review): accepted but unused here.
        act: activation name. NOTE(review): accepted but unused here.
        dropout: dropout probability applied after each FCNet layer.
    """
    super(Att_3S_layer2, self).__init__()
    # Fix: `dropout` was accepted but ignored (0.3 was hard-coded); wire the
    # parameter through. The default of 0.3 preserves existing behavior.
    self.v_proj = FCNet([v_dim, num_hid], output_dropout=dropout)
    self.q_proj = FCNet([q_dim, num_hid], output_dropout=dropout)
    self.nonlinear = FCNet([num_hid, num_hid], output_dropout=dropout)
    # Final scalar attention logit per position.
    self.linear = nn.Linear(num_hid, 1)
def __init__(self, v_dim, q_dim, num_hid, norm='weight', act='LeakyReLU', dropout=0.3):
    """Build visual/question projections for key-concat attention.

    Args:
        v_dim: dimensionality of the visual feature vector.
        q_dim: dimensionality of the question/text feature vector.
        num_hid: hidden size of both projections (also stored for later use).
        norm: normalization scheme name. NOTE(review): accepted but unused here.
        act: activation name. NOTE(review): accepted but unused here.
        dropout: dropout probability applied after each FCNet layer.
    """
    super(Att_4_layer2_keycat_textual_visual, self).__init__()
    # Fix: `dropout` was accepted but ignored (0.3 was hard-coded); wire the
    # parameter through. The default of 0.3 preserves existing behavior.
    # (Removed dead commented-out nn.Sequential/nn.Linear alternatives.)
    self.v_proj = FCNet([v_dim, num_hid], output_dropout=dropout)
    self.q_proj = FCNet([q_dim, num_hid], output_dropout=dropout)
    # Kept so forward() can scale/reshape by the hidden size.
    self.num_hid = num_hid
def __init__(self, v_dim, q_dim, num_hid):
    """Build a single-layer attention scorer over concatenated features.

    Args:
        v_dim: dimensionality of the visual feature vector.
        q_dim: dimensionality of the question/text feature vector.
        num_hid: hidden size of the fusion layer.
    """
    super(Att_0_layer2, self).__init__()
    # Visual and textual features are concatenated before projection.
    fused_dim = v_dim + q_dim
    self.nonlinear = FCNet([fused_dim, num_hid], output_dropout=0.3)
    # Scalar attention logit per position.
    self.linear = nn.Linear(num_hid, 1)
def __init__(self, v_dim, q_dim, num_hid, norm='weight', act='LeakyReLU', dropout=0.3):
    """Build separate visual/question projections (masked-attention variant).

    Args:
        v_dim: dimensionality of the visual feature vector.
        q_dim: dimensionality of the question/text feature vector.
        num_hid: hidden size shared by both projections.
        norm: normalization scheme name. NOTE(review): accepted but unused here.
        act: activation name. NOTE(review): accepted but unused here.
        dropout: dropout probability applied after each FCNet layer.
    """
    super(Att_4_layer2_huge_negative_mask, self).__init__()
    # Fix: `dropout` was accepted but ignored (0.3 was hard-coded); wire the
    # parameter through. The default of 0.3 preserves existing behavior.
    self.v_proj = FCNet([v_dim, num_hid], output_dropout=dropout)
    self.q_proj = FCNet([q_dim, num_hid], output_dropout=dropout)
def __init__(self, v_dim, q_dim, num_hid):
    """Build per-modality projections and a scalar attention scorer.

    Args:
        v_dim: dimensionality of the visual feature vector.
        q_dim: dimensionality of the question/text feature vector.
        num_hid: hidden size shared by both projections.
    """
    super(Att_2_layer2, self).__init__()
    # Scalar attention logit produced from the fused hidden representation.
    self.linear = nn.Linear(num_hid, 1)
    # Independent projections map each modality into the shared hidden space.
    self.v_proj = FCNet([v_dim, num_hid], output_dropout=0.3)
    self.q_proj = FCNet([q_dim, num_hid], output_dropout=0.3)
def __init__(self, v_dim, q_dim, num_hid, norm='weight', act='LeakyReLU', dropout=0.3):
    """Build a two-layer MLP attention scorer over concatenated features.

    Args:
        v_dim: dimensionality of the visual feature vector.
        q_dim: dimensionality of the question/text feature vector.
        num_hid: hidden size of both MLP layers.
        norm: normalization scheme name. NOTE(review): accepted but unused here.
        act: activation name. NOTE(review): accepted but unused here.
        dropout: dropout probability applied after each FCNet layer.
    """
    super(Att_1_layer2_keycat_textual_visual, self).__init__()
    # Fix: `dropout` was accepted but ignored (0.3 was hard-coded); wire the
    # parameter through. The default of 0.3 preserves existing behavior.
    self.nonlinear_1 = FCNet([v_dim + q_dim, num_hid], output_dropout=dropout)
    self.nonlinear_2 = FCNet([num_hid, num_hid], output_dropout=dropout)
    # Final scalar attention logit per position.
    self.linear = nn.Linear(num_hid, 1)
def __init__(self, query_size, key_size, hidden_size, dropout=0.3):
    """Build query/key projections into a shared hidden space.

    Args:
        query_size: dimensionality of the query vectors.
        key_size: dimensionality of the key vectors.
        hidden_size: shared projection dimensionality.
        dropout: dropout probability applied after each FCNet layer.
            Added as a backward-compatible keyword (default 0.3 matches
            the previously hard-coded value), consistent with the other
            attention constructors in this file.
    """
    super().__init__()
    self.query_proj = FCNet([query_size, hidden_size], output_dropout=dropout)
    self.key_proj = FCNet([key_size, hidden_size], output_dropout=dropout)