def __init__(self, sorter_type, seq_len=None, sorter_state_dict=None):
    """Build a rank loss around a differentiable sorter network.

    Args:
        sorter_type: identifier handed to ``model_loader`` to select the
            sorter architecture.
        seq_len: optional sequence length forwarded to ``model_loader``.
        sorter_state_dict: optional pre-trained weights for the sorter.
    """
    super(RankLoss, self).__init__()
    # The sorter sub-module is the only state this loss carries.
    self.sorter = model_loader(sorter_type, seq_len, sorter_state_dict)
def __init__(self, sorter_type, seq_len=None, sorter_state_dict=None,
             margin=0.2, nmax=1):
    """Build a hard-ranking loss around a differentiable sorter network.

    Args:
        sorter_type: identifier handed to ``model_loader`` to select the
            sorter architecture.
        seq_len: optional sequence length forwarded to ``model_loader``.
        sorter_state_dict: optional pre-trained weights for the sorter.
        margin: margin hyper-parameter of the hard-ranking objective.
        nmax: number of hardest examples retained (semantics defined by
            the loss's forward pass, not visible in this chunk).
    """
    super(RankHardLoss, self).__init__()
    # Store the hyper-parameters first, then construct the sorter.
    self.nmax = nmax
    self.margin = margin
    self.sorter = model_loader(sorter_type, seq_len, sorter_state_dict)
def __init__(self, sorter_type, seq_len=None, sorter_state_dict=None, lbd=0):
    """Build a Spearman-style loss around a differentiable sorter network.

    Args:
        sorter_type: identifier handed to ``model_loader`` to select the
            sorter architecture.
        seq_len: optional sequence length forwarded to ``model_loader``.
        sorter_state_dict: optional pre-trained weights for the sorter.
        lbd: blending coefficient used by the loss (presumably weighting
            the MSE term against the L1 term -- confirm in ``forward``).
    """
    super(SpearmanLoss, self).__init__()
    self.sorter = model_loader(sorter_type, seq_len, sorter_state_dict)
    # Two element-wise criteria kept as sub-modules; attribute names are
    # part of the class's internal contract and must not change.
    self.criterion_mse = torch.nn.MSELoss()
    self.criterionl1 = torch.nn.L1Loss()
    self.lbd = lbd
from flask import Flask
from flask import jsonify
from flask import request
import numpy as np

from model import model_loader

# Create the web application and load the trained model once at start-up,
# so every request reuses the same loaded model instance.
app = Flask(__name__)
# NOTE(review): path is relative to the process working directory -- the
# model file must sit next to the script when it is launched.
model = model_loader('model.tflite')

# API MAIN STRUCTURE:


@app.route('/api/', methods=['GET'])
def test():
    """ GET method to test the API. """
    # Output message:
    message = {"response": [{"text": "Hello world!"}]}
    return jsonify(message)


@app.route('/api/predict', methods=['POST'])
def predict():
    """ POST method to predict using the trained model. """
    data = request.get_json()
    # NOTE(review): assumes the JSON body has the shape
    # {"input": [{"values": [...]}]} -- confirm against the client;
    # only the first "input" entry is read.
    values = np.array(data['input'][0]['values'], dtype=np.float32)
    # NOTE(review): the function appears truncated here -- the inference
    # and response-building steps are not visible in this chunk.
from flask import Flask
from flask import jsonify
from flask import request
import numpy as np

from model import model_loader

# Create the web application and load the trained model once at start-up,
# so every request reuses the same loaded model instance.
app = Flask(__name__)
# NOTE(review): '../model.tflite' resolves against the process working
# directory, not this file's location -- verify the launch directory.
model = model_loader('../model.tflite')

# API MAIN STRUCTURE:


@app.route('/api/', methods=['GET'])
def test():
    """ GET method to test the API. """
    # Output message:
    message = {"response": [{"text": "Hello world!"}]}
    return jsonify(message)


@app.route('/api/predict', methods=['POST'])
def predict():
    """ POST method to predict using the trained model. """
    data = request.get_json()
    # NOTE(review): assumes the JSON body has the shape
    # {"input": [{"values": [...]}]} -- confirm against the client;
    # only the first "input" entry is read.
    values = np.array(data['input'][0]['values'], dtype=np.float32)
    # NOTE(review): the function appears truncated here -- the inference
    # and response-building steps are not visible in this chunk.
dset = SeqDataset(args.seq_len, dist=args.dist) train_loader = DataLoader(dset, batch_size=args.batch_size, shuffle=False, num_workers=2, sampler=SubsetRandomSampler( range(int(len(dset) * 0.1), len(dset)))) val_loader = DataLoader(dset, batch_size=args.batch_size, shuffle=False, num_workers=2, sampler=SubsetRandomSampler( range(int(len(dset) * 0.1)))) model = model_loader(args.model_type, args.seq_len) model.to(device) optimizer = torch.optim.Adam(model.parameters(), lr=args.lr) lr_scheduler = StepLR(optimizer, args.lr_steps, 0.5) criterion = nn.L1Loss() print("Nb parameters:", count_parameters(model)) start_epoch = 0 best_rec = 10000 for epoch in range(start_epoch, args.mepoch): is_best = False lr_scheduler.step() train_loss, batch_train, data_train = train(