import numpy as np

model = nn.Sequential(
    layer,
    nn.Flatten(),
    nn.Linear(in_features=args.channels * args.numf**DIM, out_features=10))

train_dataset = give(DIM, args.numf, args.channels)

if args.nodes > 1:
    model, train_loader = lib_torch.distribute(model, train_dataset,
                                               args.nodes, args.batch)
else:
    train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                               batch_size=args.batch,
                                               shuffle=True)

time = lib_torch.profile(['batch_norm', 'NativeBatchNormBackward'],
                         model, train_loader, args.epochs)

data = np.array([[
    args.epochs,
    tor_data.ds_size,  # dataset size
    args.numf,
    args.channels,
    args.batch,
    args.nodes,
    time
]])

with open('norm{}d.ptorch'.format(DIM), 'a') as file:
    np.savetxt(file, data, delimiter=",", fmt="%s")
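The `layer` passed into `nn.Sequential` above is built earlier in the script and is not shown here; for this benchmark it is presumably the batch-normalization module matching the data dimensionality. A minimal sketch of that selection, assuming the same `args` and `DIM` names:

import torch.nn as nn

# Hypothetical reconstruction: pick the BatchNorm variant that matches the
# spatial dimensionality of the synthetic data (1-D, 2-D, or 3-D input).
norm = {1: nn.BatchNorm1d, 2: nn.BatchNorm2d, 3: nn.BatchNorm3d}
layer = norm[DIM](num_features=args.channels)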
import numpy as np

# The dense layer under test doubles as the classifier head.
layer = nn.Linear(in_features=args.numf, out_features=args.units)

model = nn.Sequential(
    nn.Flatten(),
    layer)

train_dataset = give(1, args.numf, 1, out_size=args.units)

if args.nodes > 1:
    model, train_loader = lib_torch.distribute(model, train_dataset,
                                               args.nodes, args.batch)
else:
    train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                               batch_size=args.batch,
                                               shuffle=True)

time = lib_torch.profile(['addmm', 'AddmmBackward'],
                         model, train_loader, args.epochs)

data = np.array([[
    args.epochs,
    tor_data.ds_size,  # dataset size
    args.numf,
    args.batch,
    args.nodes,
    args.units,
    time
]])

with open('final_dense.ptorch', 'a') as file:
    np.savetxt(file, data, delimiter=",", fmt="%s")
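`lib_torch.profile` is the project's own helper; the scripts only show that it takes a list of operator names (here 'addmm' and its backward node), a model, a loader, and an epoch count, and returns a single time. One plausible implementation, sketched with PyTorch's autograd profiler and a generic SGD/cross-entropy training loop (the helper name, loss, and optimizer are assumptions, not the project's actual code):

import torch
import torch.nn as nn

def profile_ops(op_names, model, loader, epochs, lr=0.01):
    # Train for `epochs` passes under the autograd profiler and return the
    # total CPU time (in seconds) spent in the named operators.
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(model.parameters(), lr=lr)
    with torch.autograd.profiler.profile() as prof:
        for _ in range(epochs):
            for inputs, labels in loader:
                optimizer.zero_grad()
                loss = criterion(model(inputs), labels)
                loss.backward()
                optimizer.step()
    # key_averages() aggregates events by name; depending on the PyTorch
    # version, forward ops may appear as e.g. 'addmm' or 'aten::addmm'.
    events = prof.key_averages()
    return sum(e.cpu_time_total for e in events if e.key in op_names) / 1e6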
import numpy as np

model = nn.Sequential(
    nn.Flatten(),
    nn.Linear(in_features=args.channels * args.numf**DIM, out_features=10))

train_dataset = give(DIM, args.numf, args.channels)

if args.nodes > 1:
    model, train_loader = lib_torch.distribute(model, train_dataset,
                                               args.nodes, args.batch)
else:
    train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                               batch_size=args.batch,
                                               shuffle=True)

time = lib_torch.profile(['flatten'], model, train_loader, args.epochs)

data = np.array([[
    args.epochs,
    tor_data.ds_size,  # dataset size
    args.numf,
    args.channels,
    args.batch,
    args.nodes,
    time
]])

with open('flatten{}d.ptorch'.format(DIM), 'a') as file:
    np.savetxt(file, data, delimiter=",", fmt="%s")
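The `in_features=args.channels * args.numf**DIM` head only works because `nn.Flatten` collapses everything after the batch dimension. A quick shape check, with made-up numbers standing in for `args`:

import torch
import torch.nn as nn

x = torch.randn(8, 3, 32, 32)   # batch=8, channels=3, numf=32, DIM=2
y = nn.Flatten()(x)
print(y.shape)                  # torch.Size([8, 3072]); 3072 == 3 * 32**2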
import numpy as np

model = nn.Sequential(
    layer,
    nn.Flatten(),
    # Assumption: the classifier input size mirrors the conv benchmark, i.e.
    # channels times the pooled output size per dimension.
    nn.Linear(
        in_features=args.channels *
        lib_torch.conv_size_out(args.numf, args.pool, args.stride)**DIM,
        out_features=10))

train_dataset = give(DIM, args.numf, args.channels)

if args.nodes > 1:
    model, train_loader = lib_torch.distribute(model, train_dataset,
                                               args.nodes, args.batch)
else:
    train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                               batch_size=args.batch,
                                               shuffle=True)

time = lib_torch.profile(['max_pool{}d'.format(DIM)],
                         model, train_loader, args.epochs)

data = np.array([[
    args.epochs,
    tor_data.ds_size,  # dataset size
    args.numf,
    args.channels,
    args.batch,
    args.nodes,
    args.pool,
    args.stride,
    time
]])

with open('max{}d.ptorch'.format(DIM), 'a') as file:
    np.savetxt(file, data, delimiter=",", fmt="%s")
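As in the batch-norm script, the pooling `layer` itself is set up earlier; presumably it is the `nn.MaxPool` variant matching `DIM`, parameterized by `args.pool` and `args.stride`. A sketch under that assumption:

import torch.nn as nn

# Hypothetical reconstruction of the layer under test.
pool = {1: nn.MaxPool1d, 2: nn.MaxPool2d, 3: nn.MaxPool3d}
layer = pool[DIM](kernel_size=args.pool, stride=args.stride)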
import numpy as np

model = nn.Sequential(
    layer,
    nn.Flatten(),
    nn.Linear(in_features=args.channels * args.numf**DIM, out_features=10))

train_dataset = give(DIM, args.numf, args.channels)

if args.nodes > 1:
    model, train_loader = lib_torch.distribute(model, train_dataset,
                                               args.nodes, args.batch)
else:
    train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                               batch_size=args.batch,
                                               shuffle=True)

time = lib_torch.profile(['relu'], model, train_loader, args.epochs)

data = np.array([[
    args.epochs,
    tor_data.ds_size,  # dataset size
    args.numf,
    args.channels,
    args.batch,
    args.nodes,
    time
]])

with open('relu{}d.ptorch'.format(DIM), 'a') as file:
    np.savetxt(file, data, delimiter=",", fmt="%s")
import numpy as np

# Assumption: the model head mirrors the other benchmarks (the conv layer
# under test, then Flatten, then a Linear classifier).
model = nn.Sequential(
    layer,
    nn.Flatten(),
    nn.Linear(
        in_features=args.filters *
        lib_torch.conv_size_out(args.numf, args.kernel, args.stride)**DIM,
        out_features=10))

train_dataset = give(DIM, args.numf, args.channels)

if args.nodes > 1:
    model, train_loader = lib_torch.distribute(model, train_dataset,
                                               args.nodes, args.batch)
else:
    train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                               batch_size=args.batch,
                                               shuffle=True)

time = lib_torch.profile(['conv{}d'.format(DIM), 'MkldnnConvolutionBackward'],
                         model, train_loader, args.epochs)

data = np.array([[
    args.epochs,
    tor_data.ds_size,  # dataset size
    args.numf,
    args.channels,
    args.batch,
    args.nodes,
    args.kernel,
    args.stride,
    args.filters,
    time
]])

# Assumption: results are appended to conv{}d.ptorch, matching the naming
# convention of the other benchmarks.
with open('conv{}d.ptorch'.format(DIM), 'a') as file:
    np.savetxt(file, data, delimiter=",", fmt="%s")
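`lib_torch.conv_size_out` is presumably the standard output-size formula for a strided window with no padding or dilation; a sketch of what it likely computes, with a small sanity check:

def conv_size_out(size, kernel, stride):
    # Output length of a convolution/pooling window over `size` inputs,
    # assuming no padding and no dilation: floor((size - kernel) / stride) + 1.
    return (size - kernel) // stride + 1

assert conv_size_out(32, 3, 1) == 30   # 32-wide input, 3-wide kernel, stride 1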
import numpy as np

model = nn.Sequential(
    layer,
    nn.Flatten(),
    nn.Linear(in_features=args.channels * args.numf**DIM, out_features=10))

train_dataset = give(DIM, args.numf, args.channels)

if args.nodes > 1:
    model, train_loader = lib_torch.distribute(model, train_dataset,
                                               args.nodes, args.batch)
else:
    train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                               batch_size=args.batch,
                                               shuffle=True)

time = lib_torch.profile(['dropout', 'feature_dropout'],
                         model, train_loader, args.epochs)

data = np.array([[
    args.epochs,
    tor_data.ds_size,  # dataset size
    args.numf,
    args.channels,
    args.batch,
    args.nodes,
    args.drop,
    time
]])

with open('drop{}d.ptorch'.format(DIM), 'a') as file:
    np.savetxt(file, data, delimiter=",", fmt="%s")
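Each run appends one comma-separated row to its `*.ptorch` file, so results can be loaded back as a 2-D array for analysis. For example, for the 2-D dropout file (column order as in the `np.array` above):

import numpy as np

rows = np.loadtxt('drop2d.ptorch', delimiter=',', ndmin=2)
# Columns: epochs, dataset size, numf, channels, batch, nodes, drop, time
epochs, ds_size, numf, channels, batch, nodes, drop, runtime = rows[-1]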