def train(X, y, epochs=10, batch_size=16):
    dataset = IrisDataset(X, y)
    num_examples = len(dataset)
    loader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True)

    model = IrisNet(input_dim=X.shape[1])
    optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
    criterion = torch.nn.CrossEntropyLoss()

    for epoch in range(1, epochs + 1):
        num_correct = 0
        for inputs, labels in loader:
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, labels)
            # count correct predictions for this batch
            num_correct += (labels == outputs.argmax(1)).sum().item()
            loss.backward()
            optimizer.step()
        accuracy = num_correct / num_examples * 100
        print(f"Finished epoch {epoch}, accuracy: {accuracy:.2f}%")

    return model
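For context, a minimal way to call train() might look like the sketch below. It assumes scikit-learn is available and that IrisDataset accepts the NumPy arrays returned by load_iris directly; both are assumptions, since the data-loading code is not shown here.

# Hypothetical usage sketch -- adjust to however IrisDataset expects its inputs.
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split

iris = load_iris()
X_train, X_test, y_train, y_test = train_test_split(
    iris.data, iris.target, test_size=0.2, random_state=42
)

model = train(X_train, y_train, epochs=10, batch_size=16)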
def run(device):
    net = IrisNet()
    net.to(device)
    optimizer = Adam(net.parameters(), lr=args.lr)
    criterion = nn.CrossEntropyLoss()

    train_set = IrisDataset(args.dpath)
    train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=True)

    losses = []
    accuracy = []

    # Begin training
    for ep in range(args.epochs):
        ep_loss = 0
        ep_acc = 0
        for inputs, labels in train_loader:
            inputs = inputs.float().to(device)
            labels = labels.to(device)

            optimizer.zero_grad()
            outputs, probs = net(inputs)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()

            # accumulate per-example loss and correct predictions
            ep_loss += loss.item() * len(inputs)
            ep_acc += (probs.argmax(dim=-1) == labels).sum().item()

        losses.append(ep_loss / len(train_set))
        accuracy.append(ep_acc / len(train_set))

    return net, losses, accuracy
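run() reads its hyperparameters from a module-level args object that is not shown in this snippet. One plausible way to supply it is a standard argparse block; the flag names mirror the attributes run() reads (lr, dpath, batch_size, epochs), but the defaults and CLI wiring below are assumptions, not values from the original code.

# Hypothetical CLI wiring for run() -- defaults are placeholders.
import argparse
import torch

parser = argparse.ArgumentParser(description="Train IrisNet")
parser.add_argument("--lr", type=float, default=0.01)
parser.add_argument("--dpath", type=str, default="data/iris.csv")
parser.add_argument("--batch-size", dest="batch_size", type=int, default=16)
parser.add_argument("--epochs", type=int, default=10)
args = parser.parse_args()

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
net, losses, accuracy = run(device)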
def __init__(self, config):
    bucket, key = re.match("s3://(.+?)/(.+)", config["model"]).groups()
    s3 = boto3.client("s3")
    s3.download_file(bucket, key, "model.pth")

    model = IrisNet()
    model.load_state_dict(torch.load("model.pth"))
    model.eval()

    self.model = model
def __init__(self, config):
    # download the model
    bucket, key = re.match("s3://(.+?)/(.+)", config["model"]).groups()
    s3 = boto3.client("s3", config=Config(signature_version=UNSIGNED))
    s3.download_file(bucket, key, "/tmp/model.pth")

    # initialize the model
    model = IrisNet()
    model.load_state_dict(torch.load("/tmp/model.pth"))
    model.eval()
    self.model = model
def __init__(self, config):
    # download the model
    bucket, key = re.match("s3://(.+?)/(.+)", config["model"]).groups()
    if os.environ.get("AWS_ACCESS_KEY_ID"):
        s3 = boto3.client("s3")  # client will use your credentials if available
    else:
        s3 = boto3.client("s3", config=Config(signature_version=UNSIGNED))  # anonymous client
    s3.download_file(bucket, key, "/tmp/model.pth")

    # initialize the model
    model = IrisNet()
    model.load_state_dict(torch.load("/tmp/model.pth"))
    model.eval()
    self.model = model
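All three constructor variants expect config["model"] to hold an S3 URI pointing at the serialized weights, and they split it with the same regex. As a quick sanity check on that parsing (the bucket and key below are example values, not from the original deployment), the match behaves like this:

# Example of the URI parsing used above -- bucket/key are placeholders.
import re

config = {"model": "s3://example-bucket/iris-classifier/model.pth"}
bucket, key = re.match("s3://(.+?)/(.+)", config["model"]).groups()
print(bucket)  # example-bucket
print(key)     # iris-classifier/model.pth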
import re
import torch

from model import IrisNet

model = IrisNet()


def init(model_path, metadata):
    model.load_state_dict(torch.load(model_path))
    model.eval()


labels = ["iris-setosa", "iris-versicolor", "iris-virginica"]


def predict(payload, metadata):
    input_tensor = torch.FloatTensor(
        [
            [
                payload["sepal_length"],
                payload["sepal_width"],
                payload["petal_length"],
                payload["petal_width"],
            ]
        ]
    )
    output = model(input_tensor)
    return labels[torch.argmax(output[0])]
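To exercise predict() locally, you could call init() and then pass a payload with the four measurements. The sketch below is only illustrative: "model.pth" is a placeholder path and the feature values are arbitrary sample measurements.

# Hypothetical local test of the serving functions above.
init("model.pth", metadata=None)

sample = {
    "sepal_length": 5.2,
    "sepal_width": 3.6,
    "petal_length": 1.5,
    "petal_width": 0.3,
}
print(predict(sample, metadata=None))  # e.g. "iris-setosa"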