"""Compute DGA subspace directions from pickled training data and save them.

Loads the training splits produced upstream, derives the DGA search
directions via ``get_dga_sdirs``, and persists the result to
``paths.dga_path`` unless ``--dry-run`` is set.
"""
from common.argparser import argparser
from common.arguments import Arguments
from common.approximation import get_dga_sdirs
from common.utils import get_paths
import pickle as pkl

args = Arguments(argparser())
paths = get_paths(args)

print('Loading data: {}'.format(paths.data_path))
# Context manager closes the handle deterministically (the original leaked
# the file object returned by open()).
# NOTE(review): pickle.load executes code embedded in the file — acceptable
# only because this path is a trusted local artifact.
with open(paths.data_path, 'rb') as f:
    # Test splits are unused here; only the training splits feed the DGA fit.
    X_trains, _, y_trains, _, meta = pkl.load(f)

sdirs = get_dga_sdirs(args, X_trains, y_trains)

print('Saving:', paths.dga_path)
if not args.dry_run:
    # 'with' guarantees the pickle is flushed and the handle closed even if
    # pkl.dump raises.
    with open(paths.dga_path, 'wb') as f:
        pkl.dump(sdirs, f)
# --- Distributed-training setup: args, device, logging, TensorBoard, FL graph.
# NOTE(review): functools, torch, sy (PySyft), shutil, SummaryWriter and the
# common.* helpers are imported above this chunk — confirm against full file.
from data.loader import get_loader
from models.train import distributed_train, test
from models.utils import get_model
from viz.training_plots import training_plots

# Flush every print immediately so logs stream in real time when redirected.
print = functools.partial(print, flush=True)
torch.set_printoptions(linewidth=120)

# ------------------------------------------------------------------------------
# Setups
# ------------------------------------------------------------------------------
args = Arguments(argparser())
hook = sy.TorchHook(torch)
device = get_device(args)
paths = get_paths(args, distributed=True)
log_file, std_out = init_logger(paths.log_file, args.dry_run, args.load_model)

# Start each run with a fresh TensorBoard directory. EAFP removal avoids the
# exists()/rmtree() race of the original check-then-act pattern.
try:
    shutil.rmtree(paths.tb_path)
except FileNotFoundError:
    pass
tb = SummaryWriter(paths.tb_path)

print('+' * 80)
print(paths.model_name)
print('+' * 80)
print(args.__dict__)
print('+' * 80)

# prepare graph and data
_, workers = get_fl_graph(hook, args.num_workers)
print('Loading data: {}'.format(paths.data_path))