from fastapi import Request, FastAPI
from pydantic import BaseModel

from MLP import MLP
from dataProcessing import load_data
from federatedLearning import try_to_update


class Item(BaseModel):
    """Payload a client POSTs to /send_weights/: one layer of local weights."""

    # Index of the layer these weights belong to (matches the order of
    # global_model.get_weights() on the server side).
    layer: int
    # Nested lists produced by numpy's .tolist() on the client — TODO confirm
    # the exact nesting depth against MLP's layer shapes.
    weights: list
    # Number of training samples the client used; presumably a weighting
    # factor for federated averaging — verify against federatedLearning.
    data_qtd: int


# Server-side global model shared by all endpoints; built once at startup.
global_model = MLP()
global_model.build()

app = FastAPI()
# Per-client weight store: maps client IP -> {layer index -> weights list}.
# Populated by /send_weights/; module-level, so not safe across workers.
clients_weights = {}


@app.get("/get_weights/")
async def get_weights(layer: int):
    """Serve one layer of the global model's weights as a JSON list.

    The response maps the layer index to the weight values; note that
    JSON serialization turns the integer key into a string, which the
    client accounts for when reading the response.
    """
    layer_weights = global_model.get_weights()[layer]
    return {layer: layer_weights.tolist()}


@app.post("/send_weights/")
async def send_weights(item: Item, request: Request):
    """Store one layer of locally-trained weights reported by a client.

    Clients are keyed by their source IP address; each client entry maps
    a layer index to that layer's weight list.

    NOTE(review): the original body of this handler was corrupted — an
    unrelated Keras/NCF training snippet was pasted after the dangling
    `else:`, which is a syntax error. Reconstructed minimally: the else
    branch creates the per-client dict, mirroring the update branch.
    The lost code presumably called `try_to_update` (imported above) to
    aggregate once all clients reported — TODO restore from the original
    project.
    """
    host = request.client.host
    if host in clients_weights:
        clients_weights[host][item.layer] = item.weights
    else:
        clients_weights[host] = {item.layer: item.weights}
# --------------------------------------------------------------------------
# Second file (scraped example boundary): federated-learning client script.
# --------------------------------------------------------------------------
import json
import os
import time
from random import randint

import numpy as np
import requests

from MLP import MLP
from dataProcessing import create_clients

# Client-side federated-learning round loop: pull the current global weights
# from the server, train locally on this client's data slice, then report
# the updated weights back.
# NOTE(review): the script is truncated in this chunk — the POST request
# using `url` and `local_weights` continues past the last visible line.
for _ in range(4):
    # Which data slice this client owns, taken from the container env.
    # NOTE(review): requires `os` to be imported in this script.
    i = int(os.environ['N_Client'])
    X_slices, y_slices = create_clients()

    client_model = MLP()
    client_model.build()

    # Fetch the global model one layer at a time; each layer has a kernel
    # and a bias, hence len(layers) * 2 weight arrays — TODO confirm this
    # matches MLP.get_weights() ordering.
    global_weights = []
    for layer in range(len(client_model.get_layers()) * 2):
        r = requests.get(
            'http://server:8000/get_weights/?layer={}'.format(layer))
        # JSON object keys are strings, so the integer layer index is
        # looked up via str(layer).
        global_weights.append(np.array(r.json()[str(layer)]))

    client_model.compile()
    client_model.set_weights(global_weights)
    # Local training on this client's slice only.
    client_model.fit(X_slices[i], y_slices[i])

    local_weights = client_model.get_weights()

    # Endpoint that receives the locally-trained weights (POST body is an
    # Item: layer index, weights, sample count).
    url = 'http://server:8000/send_weights/'