Example #1
    def summary_evaluation(self, sess: tf.Session, data_key: str, writer):
        opts = self.options
        g = self.graph  # tensorflow.python.framework.ops.Graph

        data_iter = self.data[data_key]

        step = tf.train.get_global_step(g).eval(session=sess)
        current_epoch = self.get_current_epoch_by_step(step)
        # load if exists
        save_to_path = os.path.join(opts.output_dir, 'LP',
                                    'rank-{}.pkl'.format(step))
        eval_scores = g.get_collection(tools.LinkPredictionScoreKey)
        if os.path.exists(save_to_path):
            with open(save_to_path, 'rb') as pklfile:
                ranks = pickle.load(pklfile)
            tf_summary_value, _ = self.evaluator.cal_result_by_rank(
                ranks, step, current_epoch, data_key)
        else:
            tools.make_sure_path_exists(os.path.dirname(save_to_path))
            place_holders = self.args_input[0] + [self.relation_inputs]
            score_batches = self.eval_epoch(sess, data_iter, place_holders,
                                            eval_scores)
            tf_summary_value, ranks = self.evaluator.log_results(
                score_batches, step, current_epoch, data_key)
        writer.add_summary(tf.Summary(value=tf_summary_value), step)
        # if data_key is 'test':
        with open(save_to_path, 'wb') as pklfile:
            pickle.dump(ranks, pklfile)
        return ranks
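
The method above follows a cache-or-compute pattern: ranks are loaded from a pickle if a previous run saved them, otherwise they are computed and persisted. A minimal standalone sketch of that pattern, with a hypothetical helper name and compute_fn argument that are not part of the project:

import os
import pickle

def load_or_compute(save_to_path, compute_fn):
    # Return cached results if the pickle exists; otherwise compute and cache them.
    if os.path.exists(save_to_path):
        with open(save_to_path, 'rb') as pklfile:
            return pickle.load(pklfile)
    result = compute_fn()
    os.makedirs(os.path.dirname(save_to_path), exist_ok=True)
    with open(save_to_path, 'wb') as pklfile:
        pickle.dump(result, pklfile)
    return result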
Example #2
    def save(self, output_dir=None):
        if not output_dir:
            output_dir = self.output_dir
        tools.make_sure_path_exists(output_dir)
        self.output_time = str(datetime.now().strftime(time_format))
        output_filename = os.path.join(output_dir, "options.json")
        output_filename = tools.find_available_filename(output_filename)

        with open(output_filename, 'w') as output_file:
            json.dump(self.get_dict(), output_file, indent=13)

        logger = logging.getLogger(__name__)
        logger.debug("Save learning arguments to {}".format(output_filename))
        logger.info("[Learning Arguments] {}".format(str(self)))
Example #3
    def generate_output_dir(self, namespace):
        if isinstance(namespace, list):
            namespace, _ = self.try_parse_args(namespace)
        if not hasattr(namespace, 'output_dir') or not hasattr(namespace, 'experiment_mode'):
            return None
        output_dir = namespace.output_dir
        if output_dir is None:
            model_dir = settings.model_dir
            experiment_mode = namespace.experiment_mode

            output_dir = os.path.join(model_dir, experiment_mode, "m")
            output_dir = tools.find_available_filename(output_dir)
            # model_id = namespace._get_file_name(self)
            # output_dir += model_id

        tools.make_sure_path_exists(output_dir)
        return output_dir
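
generate_output_dir relies on tools.find_available_filename to pick a directory name that is not already taken. A standalone sketch of that idea, under the assumption that the real helper appends a numeric suffix; the project's implementation may differ:

import os

def find_available_name(base):
    # Keep appending an increasing suffix until the path is unused (illustrative only).
    candidate, i = base, 0
    while os.path.exists(candidate):
        i += 1
        candidate = '{}-{}'.format(base, i)
    return candidate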
Example #4
def run_model(main_opts, args):
    # parse, get default arguments related to model
    parser = program_parser
    # parse
    opts = parser.parse_args(args)
    os.environ['CUDA_VISIBLE_DEVICES'] = str(opts.gpus)

    # run
    if opts.output_dir is not None:
        tools.make_sure_path_exists(opts.output_dir)
        OieParser.config_log(opts.output_dir, opts.v)
    else:
        logging.basicConfig(level='DEBUG')
        logging.warning('No output log file')
    logger = logging.getLogger(__name__)
    logger.debug(opts)
    opts.run(opts)
    return opts
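
A hedged invocation sketch for run_model; the flag names mirror the commented-out argument builder in Example #7 and are assumptions about what program_parser actually accepts:

# Hypothetical call; main_opts is unused inside run_model as shown, so None is passed.
args = ['--epochs', '100',
        '--shuffle_flag',
        '--learning_rate', '0.5',
        '--gpus', '0',
        '--output_dir', '/tmp/tmp/run']
opts = run_model(None, args)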
Example #5
    def load_model(self, sess, saver, step=None):
        # ----------- Saver and Restore  ----------------------------
        opts = self.options
        g = self.graph
        checkpoint_dir = os.path.join(opts.output_dir,
                                      "checkpoints/checkpoint")
        tools.make_sure_path_exists(os.path.dirname(checkpoint_dir))
        ckpt = tf.train.get_checkpoint_state(os.path.dirname(checkpoint_dir))

        if ckpt and ckpt.model_checkpoint_path:
            if step is None:
                restore_path = ckpt.model_checkpoint_path
            else:
                restore_path = checkpoint_dir + "-{:d}".format(step)
                assert os.path.exists(
                    restore_path), "ckpt {} does not exist!".format(restore_path)

            saver.restore(sess, restore_path)
            self.logger.info(
                'Load model from [{}] successfully'.format(restore_path))
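
A hedged usage sketch for load_model, assuming the TF1-style graph and session setup used throughout these examples and that model is an instance of the class these methods belong to:

saver = tf.train.Saver(max_to_keep=5)
with tf.Session(graph=model.graph) as sess:
    sess.run(tf.global_variables_initializer())
    model.load_model(sess, saver)               # restore the latest checkpoint, if any
    # model.load_model(sess, saver, step=1000)  # or a specific global step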
Example #6
    def cal_rel_probs(
        self,
        sess: tf.Session,
        data_key,
    ):
        opts = self.options
        g = self.graph
        data_iter = self.data[data_key]
        self.logger.debug('calculating {} probabilities'.format(data_key))

        if 'decoder' not in data_key.split('-'):
            with tf.variable_scope("encoder", reuse=True):
                rel_probs_tensor = g.get_tensor_by_name(
                    "encoder/relation_predictions:0")
                entropy_tensor = self.entropy
            placeholders = self.args_input[0] + list(self.encoder_input)
        else:
            rel_probs_tensor = self.decoder.predictions
            entropy_tensor = self.decoder.entropy
            placeholders = self.args_input[0]

        score_batches = self.eval_epoch(sess, data_iter, placeholders,
                                        [rel_probs_tensor, entropy_tensor])
        rel_preds = score_batches[0]
        total_entropy = np.sum(score_batches[1])

        # save predictions
        output_dir = os.path.join(opts.output_dir, 'predictions')
        tools.make_sure_path_exists(output_dir)
        step = tf.train.get_global_step(g).eval(session=sess)
        rel_labels = self.data.get_relation_labels(data_key)

        np.savez(os.path.join(output_dir, "%s_%d.npz" % (data_key, step)),
                 label=rel_labels,
                 pred=rel_preds)
        return rel_preds, total_entropy
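
The predictions are written with np.savez under the keys 'label' and 'pred'. A hedged sketch of reading them back; the data_key 'valid', the step 1000, and the output path are placeholders:

import numpy as np

data = np.load('/tmp/tmp/predictions/valid_1000.npz')
labels, preds = data['label'], data['pred']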
Example #7
#         if isinstance(v, bool):
#             if v:
#                 args += ['--' + k]
#         elif v is None:
#             continue
#         else:
#             args += ['--' + k, str(v)]
#     args += ['--epochs', str(100),
#              '--shuffle_flag',
#              '--learning_rate', '0.5',
#              ]
#     if kbreg_args is not None:
#         if isinstance(kbreg_args, LearningArguments):
#             opts = kbreg_args
#         else:
#             opts = oie_parser.try_parse_args(kbreg_args)
#         args += ['--gpus', str(opts.gpus),
#                  ]
#         if opts.v > 0:
#             args.append('-' + 'v' * opts.v)

#     return args
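
The commented-out fragment above converts a parameter dict into a CLI argument list. A hedged reconstruction of that pattern as a standalone function; the name dict_to_args is hypothetical:

def dict_to_args(params):
    # Booleans become bare flags, None values are skipped,
    # and everything else becomes '--key value'.
    args = []
    for k, v in params.items():
        if isinstance(v, bool):
            if v:
                args += ['--' + k]
        elif v is None:
            continue
        else:
            args += ['--' + k, str(v)]
    return args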


if __name__ == "__main__":
    # options = LearningArguments()
    # options.parse("--model A --pickled_dataset ./data/data.pkl".split())
    settings.model_dir = '/tmp/tmp'
    tools.make_sure_path_exists(settings.model_dir)