def get_params():
    """Parse the request form, fit or reuse the model, run the optimization,
    and return the results as JSON."""
    global trainCSV
    params = request.form.to_dict()
    priceSMS = float(params['priceSMS'])
    priceEmail = float(params['priceEmail'])
    # Both channel prices must be non-zero.
    if priceEmail == 0 or priceSMS == 0:
        return jsonify({'status': 'Not OK'})
    sum_to = float(params['sum'])
    withSMS = params['withSMS'] == 'true'
    withEmail = params['withEmail'] == 'true'
    # The test CSV arrives base64-encoded in the form data.
    testCSV = BytesIO(base64.b64decode(params['fileTest']))
    model = Model()
    if trainCSV == '':
        # No new training data was uploaded; reuse the previously saved model.
        model.load_prev()
    else:
        trainCSV = BytesIO(base64.b64decode(trainCSV))
        model.fit(trainCSV)
    model.apply(testCSV)
    emails, smss, percents, responses, x = model.optimize(
        priceSMS, priceEmail, sum_to, withSMS, withEmail)
    return jsonify({
        'status': 'OK',
        'emails': emails,
        'smss': smss,
        'percents': percents,
        'responses': responses,
        'x': x
    })
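
# Hypothetical usage sketch for the handler above. The route path, host, and
# port are assumptions; how the view is actually registered is not shown in
# this file. It illustrates the form fields the handler reads and how the
# test CSV would be base64-encoded by a client:
#
#     import base64, requests
#
#     with open("test.csv", "rb") as f:
#         form = {
#             "priceSMS": "1.5",
#             "priceEmail": "0.1",
#             "sum": "1000",
#             "withSMS": "true",
#             "withEmail": "false",
#             "fileTest": base64.b64encode(f.read()).decode("ascii"),
#         }
#     resp = requests.post("http://localhost:5000/get_params", data=form)
#     print(resp.json())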
def setup_model(config, embeddings):
    """ Sets up the model for training/evaluation.

        The architecture here extends the architecture introduced in the
        ABCNN paper by allowing for multiple convolutional layers with
        different window sizes (computed in parallel, not in series).

        Args:
            config: dict
                Contains the information needed to set up the model.
            embeddings: nn.Embedding
                The embedding matrix for the model.

        Returns:
            model: Model
                The instantiated model.
    """
    print("Creating the ABCNN model...")

    # Read the layer configuration
    embeddings_size = config["embeddings"]["size"]
    max_length = config["max_length"]
    layer_configs = config["layers"]
    use_all_layer_outputs = config["use_all_layer_outputs"]

    # Initialize the layers
    layers = []
    layer_sizes = [embeddings_size]
    for layer_config in layer_configs:
        layer, layer_size = setup_layer(max_length, layer_config)
        layers.append(layer)
        layer_sizes.append(layer_size)

    # Compute the size of the final FC layer
    final_size = 2 * sum(layer_sizes) if use_all_layer_outputs else 2 * layer_sizes[-1]

    # Put it all together
    model = Model(embeddings, layers, use_all_layer_outputs, final_size).float()
    model.apply(weights_init)
    return model
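
# A minimal, hypothetical example of the config dict consumed by setup_model.
# Only the top-level keys read in the function ("embeddings" -> "size",
# "max_length", "layers", "use_all_layer_outputs") are known from this file;
# the per-layer fields handed to setup_layer are illustrative assumptions and
# may differ in the real project.
_EXAMPLE_MODEL_CONFIG = {
    "embeddings": {"size": 300},     # dimensionality of the word embeddings
    "max_length": 40,                # maximum sequence length (in tokens)
    "use_all_layer_outputs": True,   # use every layer's output for the FC layer
    "layers": [
        # Each entry is passed to setup_layer(max_length, layer_config);
        # the keys below are hypothetical placeholders.
        {"window_sizes": [3, 4, 5], "output_size": 50},
        {"window_sizes": [3, 4, 5], "output_size": 50},
    ],
}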