# Example #1
 def __init__(self, path_root):
     """Set up the publications parser rooted at *path_root*.

     Registers a logger for this class, records the data root, and
     initializes the output path for ``publications.json``.
     """
     super(ParsePublications, self).__init__([])
     self.log = Logger.get_logger(auxi.get_fullname(self))
     self.path_root = path_root
     # init_path presumably resolves the file under self.path_root,
     # so it must run after the assignment above -- TODO confirm.
     self.init_path("publications.json")
     # NOTE(review): "want_we_want" looks like a typo for "what_we_want";
     # kept as-is because other code may reference this attribute name.
     self.want_we_want = ["acronym", "year", "key", "title", "authors"]
# Example #2
 def __init__(self, venue):
     """Initialize a venue record from a *venue* description dict.

     The dict is expected to carry 'venue', 'url' and 'acronym' keys.
     """
     # Tuple assignment: the right-hand side is evaluated left to
     # right, so a missing key raises KeyError in the same order as
     # one-per-line assignments would.
     self.venue_name, self.venue_url, self.acronym = (
         venue['venue'], venue['url'], venue['acronym'])
     self.xml = None
     super(Venues, self).__init__(['publications'])
     self.log = Logger.get_logger(auxi.get_fullname(self))
# Example #3
    def keep_tracking(self, sess):
        """Set up summary ops/writers, checkpoint paths and the saver.

        Creates a timestamped output directory under
        ``para.TRAINING_DIRECTORY/runs/<class fullname>``, wires up
        train/dev summary ops and writers bound to *sess*, and prepares
        the checkpoint directories.  Uses the legacy (pre-1.0)
        TensorFlow summary API, consistent with the rest of the file.

        Side effects: sets ``self.out_dir``, ``self.train_summary_op``,
        ``self.train_summary_writer``, ``self.dev_summary_op``,
        ``self.dev_summary_writer``, ``self.checkpoint_prefix``,
        ``self.checkpoint_comparison``, ``self.best_model`` and
        ``self.saver``; creates directories on disk.
        """
        # Keep track of gradient values and sparsity (optional).
        # Gradients may be None for variables not reached by backprop.
        grad_summaries = []
        for g, v in self.grads_and_vars:
            if g is not None:
                grad_hist_summary = tf.histogram_summary(
                    "{}/grad/hist".format(v.name), g)
                sparsity_summary = tf.scalar_summary(
                    "{}/grad/sparsity".format(v.name), tf.nn.zero_fraction(g))
                grad_summaries.append(grad_hist_summary)
                grad_summaries.append(sparsity_summary)
        grad_summaries_merged = tf.merge_summary(grad_summaries)

        # Output directory for models and summaries.  When self.force
        # is set, wipe all previous runs of this model first.
        timestamp = str(int(time.time()))
        out_dir = os.path.join(para.TRAINING_DIRECTORY, "runs",
                               auxi.get_fullname(self))
        if self.force:
            shutil.rmtree(out_dir, ignore_errors=True)
        out_dir = os.path.join(out_dir, timestamp)
        self.out_dir = out_dir
        self.log.info("writing to {}\n".format(out_dir))

        # Summaries for loss and accuracy
        loss_summary = tf.scalar_summary("loss", self.loss)

        # Train summaries
        self.train_summary_op = tf.merge_summary(
            [loss_summary, grad_summaries_merged])
        train_summary_dir = os.path.join(out_dir, "summaries", "train")
        self.train_summary_writer = tf.train.SummaryWriter(
            train_summary_dir, sess.graph_def)

        # Dev summaries
        self.dev_summary_op = tf.merge_summary([loss_summary])
        dev_summary_dir = os.path.join(out_dir, "summaries", "dev")
        self.dev_summary_writer = tf.train.SummaryWriter(
            dev_summary_dir, sess.graph_def)

        # Checkpoint directories.  TensorFlow assumes they already
        # exist, so create them up front.  BUGFIX: create each directory
        # independently -- previously the "comparison" subdirectory was
        # only created when checkpoint_dir itself was missing, so a
        # pre-existing checkpoint_dir without it would break later.
        checkpoint_dir = os.path.join(out_dir, "checkpoints")
        self.checkpoint_prefix = os.path.join(checkpoint_dir, "model")
        self.checkpoint_comparison = os.path.join(checkpoint_dir, "comparison")
        self.best_model = os.path.join(checkpoint_dir, "best_model")
        for directory in (checkpoint_dir, self.checkpoint_comparison):
            if not os.path.exists(directory):
                os.makedirs(directory)
        self.saver = tf.train.Saver(tf.all_variables())
# Example #4
 def __init__(self):
     """Seed the RNG and read run configuration from ``para``."""
     # Fixed seed so runs are reproducible.
     np.random.seed(para.SEED)
     self.log = Logger.get_logger(auxi.get_fullname(self))
     # Whether previous run records get wiped (see para.FORCE_RM_RECORD).
     self.force = para.FORCE_RM_RECORD
     self.build_batch = 1
# Example #5
 def __init__(self):
     """Initialize the random segmentation strategy."""
     super(RandomSegmentation, self).__init__()
     self.log = Logger.get_logger(auxi.get_fullname(self))
     # Fixed seed so random segmentations are reproducible across runs.
     np.random.seed(para.SEED)
# Example #6
 def __init__(self, path_data):
     """Initialize the linear-model segmentation.

     Args:
         path_data: path to the data this segmentation operates on.
     """
     super(LinearModelSegmentation, self).__init__()
     self.log = Logger.get_logger(auxi.get_fullname(self))
     self.path_data = path_data
# Example #7
 def __init__(self):
     """Set up the crawler API client and its class-specific logger."""
     super(CrawlerAPI, self).__init__()
     self.log = Logger.get_logger(auxi.get_fullname(self))
# Example #8
 def __init__(self, path_root):
     """Record *path_root* and attach a class-specific logger.

     Args:
         path_root: root directory/path used by this component.
     """
     self.log = Logger.get_logger(auxi.get_fullname(self))
     self.path_root = path_root
# Example #9
 def __init__(self):
     """Initialize author retrieval on top of a fresh CrawlerAPI."""
     super(GetAuthors, self).__init__()
     self.log = Logger.get_logger(auxi.get_fullname(self))
     # Underlying crawler client used to issue the actual requests.
     self.api = CrawlerAPI()