from common.argparser import argparser
from common.arguments import Arguments
from common.utils import get_paths
from common.approximation import get_dga_sdirs

import pickle as pkl

args = Arguments(argparser())
paths = get_paths(args)

print('Loading data: {}'.format(paths.data_path))
X_trains, _, y_trains, _, meta = pkl.load(open(paths.data_path, 'rb'))

sdirs = get_dga_sdirs(args, X_trains, y_trains)

print('Saving:', paths.dga_path)
if not args.dry_run:
    pkl.dump(sdirs, open(paths.dga_path, 'wb'))
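
# Optional sanity check (a minimal sketch, not part of the original script):
# reload the pickled sdirs object and confirm the dump is readable. Assumes the
# dump above actually ran, i.e. args.dry_run was not set, so paths.dga_path exists.
with open(paths.dga_path, 'rb') as f:
    reloaded_sdirs = pkl.load(f)
print('Reloaded sdirs of type {} from {}'.format(
    type(reloaded_sdirs).__name__, paths.dga_path))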

import functools
import os
import shutil

import syft as sy
import torch
from torch.utils.tensorboard import SummaryWriter  # or: from tensorboardX import SummaryWriter

from common.argparser import argparser
from common.arguments import Arguments
from common.utils import get_device, get_paths, \
    init_logger, is_approx
from data.loader import get_loader
from models.model_op import add_param_list, get_model_grads
from models.multi_class_hinge_loss import multiClassHingeLoss
from models.utils import get_model
from models.train import test, sdirs_approximation

print = functools.partial(print, flush=True)
torch.set_printoptions(linewidth=120)

# ------------------------------------------------------------------------------
# Setups
# ------------------------------------------------------------------------------
args = Arguments(argparser())
hook = sy.TorchHook(torch)
device = get_device(args)
paths = get_paths(args)
log_file, std_out = init_logger(paths.log_file, args.dry_run)

# Start each run with a clean TensorBoard directory
if os.path.exists(paths.tb_path):
    shutil.rmtree(paths.tb_path)
tb = SummaryWriter(paths.tb_path)

print('+' * 80)
print(paths.model_name)
print('+' * 80)
print(args.__dict__)
print('+' * 80)

import functools
import os
import shutil

import syft as sy
import torch
from torch.utils.tensorboard import SummaryWriter  # or: from tensorboardX import SummaryWriter

from common.argparser import argparser
from common.arguments import Arguments
from common.utils import get_device, get_paths, init_logger
from data.distributor import get_fl_graph
from data.loader import get_loader
from models.train import distributed_train, test
from models.utils import get_model
from viz.training_plots import training_plots

print = functools.partial(print, flush=True)
torch.set_printoptions(linewidth=120)

# ------------------------------------------------------------------------------
# Setups
# ------------------------------------------------------------------------------
args = Arguments(argparser())
hook = sy.TorchHook(torch)
device = get_device(args)
paths = get_paths(args, distributed=True)
log_file, std_out = init_logger(paths.log_file, args.dry_run, args.load_model)

# Start each run with a clean TensorBoard directory
if os.path.exists(paths.tb_path):
    shutil.rmtree(paths.tb_path)
tb = SummaryWriter(paths.tb_path)

print('+' * 80)
print(paths.model_name)
print('+' * 80)
print(args.__dict__)
print('+' * 80)

import functools
import os
import shutil

import syft as sy
import torch
from torch.utils.tensorboard import SummaryWriter  # or: from tensorboardX import SummaryWriter

from common.argparser import argparser
from common.arguments import Arguments
from common.utils import get_device, get_paths, init_logger, \
    tb_model_summary
from data.distributor import get_fl_graph
from data.loader import get_loader
from models.train import fl_train, test
from models.utils import get_model
from viz.training_plots import training_plots

print = functools.partial(print, flush=True)
torch.set_printoptions(linewidth=120)

# ------------------------------------------------------------------------------
# Setups
# ------------------------------------------------------------------------------
args = Arguments(argparser())
hook = sy.TorchHook(torch)
device = get_device(args)
paths = get_paths(args)
log_file, std_out = init_logger(paths.log_file, args.dry_run)

# Start each run with a clean TensorBoard directory
if os.path.exists(paths.tb_path):
    shutil.rmtree(paths.tb_path)
tb = SummaryWriter(paths.tb_path)

print('+' * 80)
print(paths.model_name)
print('+' * 80)
print(args.__dict__)
print('+' * 80)