Example #1
def loss_func(h_net, input: torch.Tensor):
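    # Per-sample weights passed as grad_outputs so autograd.grad returns one gradient row per collocation point.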
    ones = torch.unsqueeze(
        torch.ones(len(input),
                   dtype=richards_celia.DTYPE,
                   device=richards_celia.DEVICE), 1)

    predicted_h_trial = h_net(input)  # ts(input); evaluate on `input`, since the derivatives below are taken w.r.t. `input`
    predicted_h_initial_boundary = h_net(tz_pairs_initial_boundary)  # ts(tz_pairs_initial_boundary)
    predicted_h_bottom_boundary = h_net(tz_pairs_bottom_boundary)
    predicted_h_top_boundary = h_net(tz_pairs_top_boundary)

    # predicted_h_top_boundary_d = torch.autograd.grad(
    #     predicted_h_top_boundary,
    #     tz_pairs_top_boundary,
    #     create_graph=True,
    #     grad_outputs=torch.unsqueeze(
    #         torch.ones(len(tz_pairs_top_boundary), dtype=richards_celia.DTYPE, device=richards_celia.DEVICE), 1)
    # )[0]
    # predicted_h_top_boundary_dz = predicted_h_top_boundary_d[:, 1:2]
    #
    # predicted_q_top_boundary = K(predicted_h_top_boundary) * (1 - predicted_h_top_boundary_dz)

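    # Gradient of h w.r.t. the (t, z) input; column 1 is dh/dz.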
    predicted_h_d = torch.autograd.grad(predicted_h_trial,
                                        input,
                                        create_graph=True,
                                        grad_outputs=ones)[0]
    predicted_h_dz = predicted_h_d[:, 1:2]

    predicted_theta = theta(predicted_h_trial)
    predicted_K = K(predicted_h_trial)

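    # d(theta)/dt: time derivative of the water content (column 0 of the input is t).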
    predicted_theta_d = torch.autograd.grad(
        predicted_theta,
        input,
        create_graph=True,
        grad_outputs=ones,
    )[0]
    predicted_theta_dt = predicted_theta_d[:, 0:1]

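    # d/dz of the diffusive flux term K(h) * dh/dz.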
    predicted_second_term = predicted_K * predicted_h_dz
    predicted_second_term_d = torch.autograd.grad(
        predicted_second_term,
        input,
        create_graph=True,
        grad_outputs=ones,
    )[0]
    predicted_second_term_dz = predicted_second_term_d[:, 1:2]

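    # dK/dz, the gravity contribution to the flux divergence.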
    predicted_K_d = torch.autograd.grad(
        predicted_K,
        input,
        create_graph=True,
        grad_outputs=ones,
    )[0]
    predicted_K_dz = predicted_K_d[:, 1:2]

    # Residual of the mixed-form Richards equation with z positive upward:
    #   d(theta)/dt - d/dz[K(h) * (dh/dz + 1)] = 0
    # (if z is measured positive downward, the sign of the dK/dz term flips)
    residual = predicted_theta_dt - predicted_second_term_dz - predicted_K_dz

    residual_h_initial_boundary = predicted_h_initial_boundary - h_initial_boundary
    residual_h_bottom_boundary = predicted_h_bottom_boundary - h_bottom_boundary
    residual_h_top_boundary = predicted_h_top_boundary - h_top_boundary
    # residual_q_top_boundary = predicted_q_top_boundary - q_top_boundary

    # fitting_error = torch.unsqueeze(torch.zeros(len(input), dtype=richards_celia.DTYPE, device=richards_celia.DEVICE),1)
    # fitting_error = predicted_wrc - theta
    # assert (fitting_error == fitting_error).any()
    #  + torch.sum(fitting_error ** 2) \

    pde_res = torch.sum(residual ** 2) / len(residual)
    boundary_bottom_res = torch.sum(residual_h_bottom_boundary ** 2) / len(residual_h_bottom_boundary)
    boundary_top_res = torch.sum(residual_h_top_boundary ** 2) / len(residual_h_top_boundary)
    # boundary_top_res = torch.sum(residual_q_top_boundary ** 2) / len(residual_q_top_boundary)
    boundary_initial_res = torch.sum(residual_h_initial_boundary ** 2) / len(residual_h_initial_boundary)
    print(f"pde_res: {pde_res}")
    print(f"boundary_bottom_res: {boundary_bottom_res}")
    print(f"boundary_top_res: {boundary_top_res}")
    print(f"boundary_initial_res: {boundary_initial_res}")
    loss = pde_res + boundary_bottom_res + boundary_top_res + boundary_initial_res
    # loss = pde_res + boundary_initial_res
    # assert (loss == loss).any()
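    # Record per-step loss components on the network for later inspection; appending the
    # tensors keeps their autograd graphs alive (.item() would store plain floats instead).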
    h_net.pde_loss += [pde_res]
    h_net.bc_initial_losses += [boundary_initial_res]
    h_net.bc_top_losses += [boundary_top_res]
    h_net.bc_bottom_losses += [boundary_bottom_res]
    h_net.losses += [loss]

    return loss
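
Both examples rely on module-level globals that are not shown: the network h_net, the constitutive relations theta(h) and K(h), the collocation tensor tz_pairs and the boundary/initial tensors tz_pairs_* (created with requires_grad enabled so torch.autograd.grad can differentiate w.r.t. (t, z)), and the target heads h_initial_boundary, h_bottom_boundary, h_top_boundary. A minimal sketch of what theta and K might look like for the Celia et al. (1990) infiltration benchmark, using Haverkamp-type relations; every name and parameter value below is an assumption for illustration, not taken from the original richards_celia module:

import torch

DTYPE = torch.float64
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Hypothetical Haverkamp-type soil parameters, commonly quoted for the
# Celia et al. (1990) sand column (stand-ins for the richards_celia values).
THETA_S, THETA_R = 0.287, 0.075
ALPHA, BETA = 1.611e6, 3.96
K_S, A_H, GAMMA = 0.00944, 1.175e6, 4.74  # K_S in cm/s, lengths in cm


def theta(h: torch.Tensor) -> torch.Tensor:
    # Volumetric water content as a differentiable function of pressure head h.
    return ALPHA * (THETA_S - THETA_R) / (ALPHA + torch.abs(h) ** BETA) + THETA_R


def K(h: torch.Tensor) -> torch.Tensor:
    # Unsaturated hydraulic conductivity as a differentiable function of h.
    return K_S * A_H / (A_H + torch.abs(h) ** GAMMA)


# Collocation points: column 0 is t, column 1 is z; requires_grad must be set
# so the derivatives inside loss_func can be taken w.r.t. the inputs.
# tz_pairs = torch.rand(1000, 2, dtype=DTYPE, device=DEVICE).requires_grad_(True)
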
Example #2
def loss_func(input: torch.Tensor):
    ones = torch.unsqueeze(torch.ones(len(input), dtype=richards_celia.DTYPE, device=richards_celia.DEVICE), 1)

    # predicted_h = h_net(tz_pairs)
    predicted_h_trial = h_net(input)
    predicted_h_initial_boundary = h_net(tz_pairs_initial_boundary)
    predicted_h_bottom_boundary = h_net(tz_pairs_bottom_boundary)
    predicted_h_top_boundary = h_net(tz_pairs_top_boundary)

    predicted_h_d = torch.autograd.grad(
        predicted_h_trial,
        input,
        create_graph=True,
        grad_outputs=ones
    )[0]
    predicted_h_dz = predicted_h_d[:, 1:2]

    predicted_theta = theta(predicted_h_trial)
    predicted_K = K(predicted_h_trial)

    predicted_theta_d = torch.autograd.grad(
        predicted_theta,
        input,
        create_graph=True,
        grad_outputs=ones,
    )[0]
    predicted_theta_dt = predicted_theta_d[:, 0:1]

    predicted_second_term = predicted_K * predicted_h_dz
    predicted_second_term_d = torch.autograd.grad(
        predicted_second_term,
        input,
        create_graph=True,
        grad_outputs=ones,
    )[0]
    predicted_second_term_dz = predicted_second_term_d[:, 1:2]

    predicted_K_d = torch.autograd.grad(
        predicted_K,
        input,
        create_graph=True,
        grad_outputs=ones,
    )[0]
    predicted_K_dz = predicted_K_d[:, 1:2]

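    # Residual of the mixed-form Richards equation (same sign convention as in Example #1).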
    residual = predicted_theta_dt - predicted_second_term_dz - predicted_K_dz

    residual_h_initial_boundary = predicted_h_initial_boundary - h_initial_boundary
    residual_h_bottom_boundary = predicted_h_bottom_boundary - h_bottom_boundary
    residual_h_top_boundary = predicted_h_top_boundary - h_top_boundary

    # fitting_error = torch.unsqueeze(torch.zeros(len(input), dtype=richards_celia.DTYPE, device=richards_celia.DEVICE),1)
    # fitting_error = predicted_wrc - theta
    # assert (fitting_error == fitting_error).any()
    #  + torch.sum(fitting_error ** 2) \

    loss = torch.sum(residual ** 2) / len(residual) \
           + torch.sum(residual_h_initial_boundary ** 2) / len(residual_h_initial_boundary) \
           + torch.sum(residual_h_bottom_boundary ** 2) / len(residual_h_bottom_boundary) \
           + torch.sum(residual_h_top_boundary ** 2) / len(residual_h_top_boundary)
    # assert (loss == loss).any()

    return loss
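
Either loss function would typically be driven from an optimizer loop; a sketch assuming h_net and the collocation tensor tz_pairs (with requires_grad=True) are already defined, with the optimizer choice and iteration count purely illustrative (Example #1's variant would be called as loss_func(h_net, tz_pairs)):

import torch

optimizer = torch.optim.Adam(h_net.parameters(), lr=1e-3)

for step in range(10000):
    optimizer.zero_grad()
    loss = loss_func(tz_pairs)  # tz_pairs plays the role of `input`
    loss.backward()
    optimizer.step()

# A second-order refinement stage with L-BFGS is also common for PINN training:
# optimizer = torch.optim.LBFGS(h_net.parameters(), lr=1.0, max_iter=500)
# def closure():
#     optimizer.zero_grad()
#     loss = loss_func(tz_pairs)
#     loss.backward()
#     return loss
# optimizer.step(closure)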