Example #1
import os
import pickle

import torch
import torch.nn as nn
import torch.optim as optim

from train_util import train_model
from dog_cat import load_datasets, prepare_datasets
from network import DistributionConvolutionModelKReluGradientMuRandom
from show_history import plot_history

# Create the output directory for the model checkpoint and training history.
if not os.path.isdir("../model"):
    os.mkdir("../model")

prepared = True  # set to False to run prepare_datasets() before training
epochs = 300
device_name = "cuda"
learning_rate = 0.001
batch_size = 32
image_size = (100, 100)
model = DistributionConvolutionModelKReluGradientMuRandom(image_size=image_size)

device = torch.device(device_name)
model.to(device)

criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=learning_rate)

if not prepared:
    prepare_datasets(image_size)
x_train, y_train, x_test, y_test = load_datasets()

print("Start Training")
model, history = train_model(model, criterion, optimizer, epochs, x_train,
                             y_train, x_test, y_test, device, batch_size)

# Persist the training history so it can be inspected or plotted later.
with open("../model/model_history.pkl", "wb") as f:
    pickle.dump(history, f)
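plot_history is imported in the example above but never called. A minimal sketch of how the pickled history could be visualized afterwards, assuming plot_history accepts the history object returned by train_model:

import pickle

from show_history import plot_history

# Reload the saved training history and plot the recorded curves
# (plot_history's exact signature is assumed here).
with open("../model/model_history.pkl", "rb") as f:
    history = pickle.load(f)

plot_history(history)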
Example #2
import torch
from distribution_3d import plot_distribution, plot_distribution_2d
from network import DistributionConvolutionModelKReluGradientMuRandom
import matplotlib.pyplot as plt

model = DistributionConvolutionModelKReluGradientMuRandom(image_size=(100,
                                                                      100))
model.load_state_dict(
    torch.load("../model/model_final.pth")["model_state_dict"])

layers = [[model.conv1_1, model.conv1_2], [model.conv2_1, model.conv2_2],
          [model.conv3_1, model.conv3_2]]
shapes = [[4, 8], [8, 8], [8, 16]]

for i, layer in enumerate(layers):
    for j, conv in enumerate(layer):
        print("conv{}_{}.png".format(i + 1, j + 1))
        # One learned distribution per kernel in this convolution layer.
        distributions = conv.distribution.detach().numpy()
        print(distributions.shape)

        # One subplot per kernel, laid out according to shapes[i].
        figure, graphs = plt.subplots(*shapes[i])
        print(graphs.shape)
        print(shapes[i][0] * shapes[i][1])

        for graph, distribution in zip(
                graphs.reshape(shapes[i][0] * shapes[i][1]), distributions):
            graph.matshow(distribution)
            graph.set_xticklabels([])
            graph.set_yticklabels([])
            graph.axis("off")

        # Save the grid under the printed filename (output path assumed).
        figure.savefig("conv{}_{}.png".format(i + 1, j + 1))
        plt.close(figure)
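plot_distribution_2d is imported at the top of this example but not used in the loop. Continuing from the code above, a minimal sketch of rendering a single kernel's distribution with it, assuming it takes one 2D array just as plot_distribution does in Example #3:

# Draw the first kernel distribution of conv1_1 with the 2D helper
# (plot_distribution_2d's signature is assumed here).
single = model.conv1_1.distribution.detach().numpy()[0]
plot_distribution_2d(single)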
Example #3
import torch
from distribution_3d import plot_distribution
from network import DistributionConvolutionModelKReluGradientMuRandom
import matplotlib.pyplot as plt

# Rebuild the model with the same input size used during training (Example #1).
model = DistributionConvolutionModelKReluGradientMuRandom(image_size=(100, 100))
model.load_state_dict(
    torch.load("../model/model_final.pth")["model_state_dict"])

# Take the third kernel's distribution from the conv3_1 layer.
distribution = model.conv3_1.distribution.detach().numpy()[2]

plot_distribution(distribution)
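Depending on whether plot_distribution calls plt.show() internally, an explicit plt.show() may still be needed after this call to display the figure; this is presumably why matplotlib.pyplot is imported in this example.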