Example #1
0
    def BetaAlpha(self, b, depth, alpha, beta):
        """Max node of minimax with alpha-beta pruning (fail-hard).

        The original placed this string *after* the first statement, so it
        was a discarded expression rather than a docstring; it now documents
        the method properly.

        :param b: board exposing is_game_over/result/generate_legal_moves/push/pop
        :param depth: remaining plies; 0 triggers static evaluation
        :param alpha: best score already guaranteed to the maximizing side
        :param beta: best score already guaranteed to the minimizing side
        :return: terminal score, static evaluation, or alpha (beta on cutoff)
        """
        self.noeuds += 1  # node counter for search statistics
        if b.is_game_over():
            resultat = b.result()
            # (-1) ** maCouleur flips the sign so that a win for this
            # engine's colour always maps to +scoremax.
            if resultat == "1-0":
                r = -((-1) ** self.maCouleur) * self.scoremax
            elif resultat == "0-1":
                r = ((-1) ** self.maCouleur) * self.scoremax
            else:  # draw
                r = 0
            return r

        if depth == 0:
            # Static evaluation at the search horizon.
            self.evaluation = eval.Eval(self.maCouleur)
            return self.evaluation.evaluate(b)

        v = None
        for m in b.generate_legal_moves():
            b.push(m)
            ret = self.AlphaBeta(b, depth - 1, alpha, beta)
            b.pop()
            if v is None or ret > v:
                v = ret
            if alpha < v:
                alpha = v
            if alpha >= beta:
                # Beta cutoff: the minimizing parent will never allow this
                # line, so fail hard and return beta.
                return beta
        return alpha
 def eval(self, **kwargs):
     """Run one evaluation pass on CPU, then restore the model to GPU.

     Moves ``self.inference`` to CPU when CUDA is available, runs the
     project evaluator, records the best checkpoint via ``backup_best``,
     and finally moves the model back to CUDA. Evaluation failures are
     printed, never raised.
     """
     logging.info('evaluating')
     if torch.cuda.is_available():
         self.inference.cpu()
     try:
         e = _eval.Eval(self.args, self.config)
         cls_ap = e()
         self.backup_best(cls_ap, e.path)
     except Exception:
         # Narrowed from a bare ``except:`` so KeyboardInterrupt and
         # SystemExit still propagate; evaluation stays best-effort.
         traceback.print_exc()
     if torch.cuda.is_available():
         self.inference.cuda()
Example #3
0
 def eval(self, **kwargs):
     """Run one evaluation pass on CPU, then restore ``inference`` to GPU.

     :keyword step: global step (accepted for interface parity; unused here)
     :keyword inference: the model moved between CPU and CUDA
     """
     # Direct dict access replaces the fragile
     # ``'step, inference'.split(', ')`` unpacking idiom; 'step' was
     # unpacked but never used.
     inference = kwargs['inference']
     logging.info('evaluating')
     if torch.cuda.is_available():
         inference.cpu()
     try:
         e = _eval.Eval(self.args, self.config)
         cls_ap = e()
         self.backup_best(cls_ap, e.path)
     except Exception:
         # Narrowed from a bare ``except:`` so KeyboardInterrupt and
         # SystemExit still propagate; evaluation stays best-effort.
         traceback.print_exc()
     if torch.cuda.is_available():
         inference.cuda()
Example #4
0
 def eval(self, **kwargs):
     """Evaluate on CPU and publish per-class and mean AP to the summary writer.

     :keyword step: global step used as the x-axis for the logged scalars
     :keyword inference: the model moved between CPU and CUDA
     """
     step, inference = (kwargs[key] for key in 'step, inference'.split(', '))
     logging.info('evaluating')
     if torch.cuda.is_available():
         inference.cpu()
     try:
         e = _eval.Eval(self.args, self.config)
         cls_ap = e()
         for c in cls_ap:
             self.summary_worker.writer.add_scalar('ap/' + self.category[c], cls_ap[c], step)
         self.summary_worker.writer.add_scalar('mean_ap', np.mean(list(cls_ap.values())), step)
     except Exception as err:
         # Renamed from ``e`` to avoid shadowing the evaluator instance
         # bound in the try block above.
         logging.warning(err)
     if torch.cuda.is_available():
         inference.cuda()
Example #5
0
 def __init__(self, color):
     """Initialise the search engine for the given side.

     :param color: side this engine plays; drives both the sign of
         terminal scores and the static evaluator's perspective.
     """
     self.scoremax = 100000            # score assigned to a won game
     self.noeuds = 0                   # nodes visited during search
     self.maCouleur = color
     self.evaluation = eval.Eval(color)
Example #6
0
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input and deprecated in PyYAML >= 5.1 — prefer
    # yaml.safe_load(f) if the params file needs no custom tags.
    with open(args.parampath) as f:
        params = yaml.load(f)

    if params is None:
        logger.error('no params found from %s' % args.parampath)
        sys.exit(1)

    # Build absolute paths for every file in the image directory
    # (the original bound each filename to ``_``, which conventionally
    # marks an unused value).
    images = [os.path.join(args.imagepath, name)
              for name in os.listdir(args.imagepath)]

    evaer = eval.Eval(modelprefix=args.modelprefix,
                      imagepath=images,
                      inputshape=params['inputshape'],
                      labelpath=params['eval']['label'],
                      epoch=args.epoch,
                      format=args.format)
    names, prob_predicts, label_predicts = evaer.predict()

    # Print each prediction and collect one report line per image.
    reports = []
    for name, label, prob in zip(names, label_predicts, prob_predicts):
        print('name: %s, predict: %s, acc: %f' % (name, label, prob))
        reports.append(name + ' : ' + label + ' : ' + str(prob) + '\n')

    # Append (not overwrite) so repeated runs accumulate a history.
    with open('reports.txt', 'a') as f:
        f.writelines(reports)
Example #7
0
    def __init__(self, vocab_file_path=None, model_file_path=None):
        """Build the full training setup: dataset, vocabularies, model, optimizer.

        :param vocab_file_path: tuple of (code vocab, ast vocab, nl vocab)
            paths; if given, vocabularies are loaded from these pickles,
            otherwise they are built from the training set, trimmed and saved.
        :param model_file_path: optional checkpoint path forwarded to
            ``models.Model`` to initialise weights.
        """

        # dataset
        self.train_dataset = data.CodePtrDataset(
            code_path=config.train_code_path,
            ast_path=config.train_sbt_path,
            nl_path=config.train_nl_path)
        self.train_dataset_size = len(self.train_dataset)
        # NOTE: the collate lambda closes over self.code_vocab / ast_vocab /
        # nl_vocab, which are only assigned below — safe because collate_fn
        # runs lazily, at iteration time, not here.
        self.train_dataloader = DataLoader(
            dataset=self.train_dataset,
            batch_size=config.batch_size,
            shuffle=True,
            collate_fn=lambda *args: utils.unsort_collate_fn(
                args,
                code_vocab=self.code_vocab,
                ast_vocab=self.ast_vocab,
                nl_vocab=self.nl_vocab))

        # vocab
        self.code_vocab: utils.Vocab
        self.ast_vocab: utils.Vocab
        self.nl_vocab: utils.Vocab
        # load vocab from given path
        if vocab_file_path:
            code_vocab_path, ast_vocab_path, nl_vocab_path = vocab_file_path
            self.code_vocab = utils.load_vocab_pk(code_vocab_path)
            self.ast_vocab = utils.load_vocab_pk(ast_vocab_path)
            self.nl_vocab = utils.load_vocab_pk(nl_vocab_path)
        # new vocab: build from the training sentences
        else:
            self.code_vocab = utils.Vocab('code_vocab')
            self.ast_vocab = utils.Vocab('ast_vocab')
            self.nl_vocab = utils.Vocab('nl_vocab')
            codes, asts, nls = self.train_dataset.get_dataset()
            for code, ast, nl in zip(codes, asts, nls):
                self.code_vocab.add_sentence(code)
                self.ast_vocab.add_sentence(ast)
                self.nl_vocab.add_sentence(nl)

            # pre-trim sizes, kept for bookkeeping/reporting
            self.origin_code_vocab_size = len(self.code_vocab)
            self.origin_nl_vocab_size = len(self.nl_vocab)

            # trim vocabulary (only code and nl are capped; ast is kept full)
            self.code_vocab.trim(config.code_vocab_size)
            self.nl_vocab.trim(config.nl_vocab_size)
            # save vocabulary (pickle for reloading, txt for inspection)
            self.code_vocab.save(config.code_vocab_path)
            self.ast_vocab.save(config.ast_vocab_path)
            self.nl_vocab.save(config.nl_vocab_path)
            self.code_vocab.save_txt(config.code_vocab_txt_path)
            self.ast_vocab.save_txt(config.ast_vocab_txt_path)
            self.nl_vocab.save_txt(config.nl_vocab_txt_path)

        # final (post-trim) sizes used to dimension the model below
        self.code_vocab_size = len(self.code_vocab)
        self.ast_vocab_size = len(self.ast_vocab)
        self.nl_vocab_size = len(self.nl_vocab)

        # model
        self.model = models.Model(code_vocab_size=self.code_vocab_size,
                                  ast_vocab_size=self.ast_vocab_size,
                                  nl_vocab_size=self.nl_vocab_size,
                                  model_file_path=model_file_path)
        # flat parameter list over all four sub-modules (e.g. for clipping)
        self.params = list(self.model.code_encoder.parameters()) + \
            list(self.model.ast_encoder.parameters()) + \
            list(self.model.reduce_hidden.parameters()) + \
            list(self.model.decoder.parameters())

        # optimizer: one parameter group per sub-module so each can have
        # its own learning rate from config
        self.optimizer = Adam([
            {
                'params': self.model.code_encoder.parameters(),
                'lr': config.code_encoder_lr
            },
            {
                'params': self.model.ast_encoder.parameters(),
                'lr': config.ast_encoder_lr
            },
            {
                'params': self.model.reduce_hidden.parameters(),
                'lr': config.reduce_hidden_lr
            },
            {
                'params': self.model.decoder.parameters(),
                'lr': config.decoder_lr
            },
        ],
                              betas=(0.9, 0.999),
                              eps=1e-08,
                              weight_decay=0,
                              amsgrad=False)

        if config.use_lr_decay:
            self.lr_scheduler = lr_scheduler.StepLR(
                self.optimizer,
                step_size=config.lr_decay_every,
                gamma=config.lr_decay_rate)

        # best score and model(state dict)
        self.min_loss: float = 1000  # sentinel: any real loss should beat this
        self.best_model: dict = {}
        # NOTE(review): ``(int, int)`` is a tuple expression, not a standard
        # type annotation — ``Tuple[int, int]`` is the conventional form.
        self.best_epoch_batch: (int, int) = (None, None)

        # eval instance seeded with the current (initial) weights
        self.eval_instance = eval.Eval(self.get_cur_state_dict())

        # early stopping
        self.early_stopping = None
        if config.use_early_stopping:
            self.early_stopping = utils.EarlyStopping()

        # each run gets its own timestamped checkpoint directory
        config.model_dir = os.path.join(config.model_dir,
                                        utils.get_timestamp())
        if not os.path.exists(config.model_dir):
            os.makedirs(config.model_dir)