Example #1
import os

# Silence TensorFlow's C++ logging; this needs to be set before TensorFlow is
# first imported (the project modules below may pull it in).
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"

import numpy as np
import matplotlib.pyplot as plt

from PIL import Image

import utils  # provides parse_args(), used below
from utils import log
from data import Dataset
from logger import LOGGER
from evaluation import Evaluation
from model import CNNModels

# %% parse arguments
FLAGS = utils.parse_args()
# %% set logger
logger = LOGGER(FLAGS)
log('Create Logger Successfully')
# %% set dataset
dataset = Dataset(FLAGS, logger)
n_train_imgs = dataset.num_train
log('Get Data Successfully')
# %% set evaluator
evaluator = Evaluation(logger, dataset)
log('Create Evaluator Successfully')

###################################################################
######################## Create Model #############################
###################################################################
# hyper-parameters and device selection
os.environ['CUDA_VISIBLE_DEVICES'] = str(FLAGS.gpu_index)
epochs = FLAGS.epochs
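
The FLAGS object above comes from utils.parse_args(); the excerpt only shows that it exposes gpu_index and epochs. A minimal sketch of such a helper, assuming argparse (flag names and defaults are illustrative, not the repository's actual ones):

# Hypothetical sketch of utils.parse_args(); the real flag set is not shown
# in this excerpt.
import argparse

def parse_args():
    parser = argparse.ArgumentParser(description="training configuration")
    parser.add_argument("--gpu_index", type=str, default="0",
                        help="value written to CUDA_VISIBLE_DEVICES")
    parser.add_argument("--epochs", type=int, default=50,
                        help="number of training epochs")
    return parser.parse_args()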
Example #2
import os
import time
from datetime import datetime

import torch

# Project-level helpers (set_seed, DATA, GraphX, LOGGER, OPTIM, TRAINER, EVAL)
# come from this repository's own modules; their imports are omitted in this
# excerpt.


def main(args):
    ts = time.strftime('%Y-%b-%d-%H:%M:%S', time.gmtime())
    seed = 1234
    set_seed(seed)

    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    print('device', device)

    # local_rank = None
    # if args.parallel:
    #     local_rank = args.local_rank
    #     torch.distributed.init_process_group(backend="nccl")
    #     device = torch.device('cuda:{}'.format(local_rank))

    data_obj = DATA()

    if "beer" in args.data_name:
        train_data, valid_data, vocab_obj = data_obj.f_load_graph_ratebeer(
            args)
        # train_data, valid_data, vocab_obj = data_obj.f_load_ratebeer(args)

    if args.train:
        # Tag the checkpoint with the current month/day/hour/minute.
        now_time = datetime.now()
        time_name = "{}_{}_{}_{}".format(
            now_time.month, now_time.day, now_time.hour, now_time.minute)
        model_dir = os.path.join(args.model_path,
                                 args.data_name + "_" + args.model_name)

        if not os.path.isdir(model_dir):
            print("create a directory", model_dir)
            os.makedirs(model_dir)

        args.model_file = os.path.join(model_dir,
                                       "model_best_" + time_name + ".pt")
        print("model_file", args.model_file)

    # print("vocab_size", vocab_obj.vocab_size)
    print("user num", vocab_obj.user_num)
    print("item num", vocab_obj.item_num)

    network = GraphX(args, vocab_obj, device)

    # Report the number of trainable parameters per tensor and in total.
    total_param_num = 0
    for name, param in network.named_parameters():
        if param.requires_grad:
            param_num = param.numel()
            total_param_num += param_num
            print(name, "\t", param_num)

    print("total parameters num", total_param_num)

    if args.train:
        logger_obj = LOGGER()
        logger_obj.f_add_writer(args)

        optimizer = OPTIM(
            filter(lambda p: p.requires_grad, network.parameters()), args)
        trainer = TRAINER(vocab_obj, args, device)
        trainer.f_train(train_data, valid_data, network, optimizer, logger_obj)

        logger_obj.f_close_writer()

    if args.eval:
        print("=" * 10, "eval", "=" * 10)

        eval_obj = EVAL(vocab_obj, args, device)

        network = network.to(device)

        # Reload the best checkpoint saved at args.model_file, then evaluate.
        eval_obj.f_init_eval(network, args.model_file, reload_model=True)

        eval_obj.f_eval(train_data, valid_data)
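
The excerpt reads args.data_name, args.model_path, args.model_name, args.train, and args.eval, and writes args.model_file; the project classes (DATA, GraphX, OPTIM, TRAINER, EVAL) consume further attributes not visible here. A minimal, hypothetical entry point, assuming an argparse-based CLI, could look like this:

# Hypothetical entry point; flag names and defaults are illustrative, and the
# repository's real parser almost certainly defines many more options.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--data_name", type=str, default="ratebeer")
    parser.add_argument("--model_name", type=str, default="graphx")
    parser.add_argument("--model_path", type=str, default="checkpoints")
    parser.add_argument("--train", action="store_true")
    parser.add_argument("--eval", action="store_true")
    args = parser.parse_args()
    main(args)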