Code Example #1
File: entry.py Project: comperical/gpt-2
    def run_op(self, argmap):

        modname = argmap.get_str(('modname', 'original'))
        seed = argmap.get_int(('seed', 10000))
        relmod = importlib.import_module(modname)
        print("Going to do Basic Model with module {}".format(modname))

        # Load the sample data, and peel off the initial layer of <start_token> data.
        origdata = self.load_data(seed)
        origdata = origdata[:, 1:]
        assert origdata.shape[1] == utility.SAMPLE_LENGTH

        enc = utility.get_encoder()
        hparams = utility.get_hparams()

        with tf.Session(graph=tf.Graph()) as sess:
            np.random.seed(seed)
            tf.set_random_seed(seed)

            tfop = relmod.model_or_sample(origdata.shape[0], origdata)

            ckpt = tf.train.latest_checkpoint(
                os.path.join('models', utility.MODEL_NAME))
            tf.train.Saver().restore(sess, ckpt)

            alpha = time.time()
            result = sess.run(tfop)
            print("Basic Model successful, took {:.03f} seconds".format(
                time.time() - alpha))

            utility.PickleData(relmod, 'modsample', seed, result=result).save()
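
The argmap calls above pass a (key, default) tuple rather than a separate key and default argument. The sketch below is a minimal, hypothetical stand-in that only illustrates that lookup convention; the project's real argmap class is not shown on this page and may differ.

class ArgMap:
    """Hypothetical dict-backed stand-in for the argmap object used in run_op."""

    def __init__(self, **kwargs):
        self._args = dict(kwargs)

    def get_str(self, key_and_default):
        # Unpack the ('key', default) tuple seen in the calls above.
        key, default = key_and_default
        return str(self._args.get(key, default))

    def get_int(self, key_and_default):
        key, default = key_and_default
        return int(self._args.get(key, default))

# Example: override only the seed, fall back to the defaults for everything else.
argmap = ArgMap(seed=4242)
print(argmap.get_str(('modname', 'original')))  # -> 'original'
print(argmap.get_int(('seed', 10000)))          # -> 4242
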
Code Example #2
File: entry.py Project: comperical/gpt-2
    def run_op(self, argmap):

        modname = argmap.get_str(('modname', 'original'))
        seed = argmap.get_int(('seed', 10000))
        batch_size = argmap.get_int(('batchsize', 100))

        enc = utility.get_encoder()
        hparams = utility.get_hparams()
        relmod = importlib.import_module(modname)

        with tf.Session(graph=tf.Graph()) as sess:
            np.random.seed(seed)
            tf.set_random_seed(seed)

            tfop = relmod.model_or_sample(batch_size)

            ckpt = tf.train.latest_checkpoint(
                os.path.join('models', utility.MODEL_NAME))
            tf.train.Saver().restore(sess, ckpt)

            alpha = time.time()
            result = sess.run(tfop)
            print("Sample successful, took {:.03f} seconds".format(
                time.time() - alpha))

            utility.PickleData(relmod, 'sample', seed, result=result).save()
Code Example #3
File: entry.py Project: comperical/gpt-2
    def run_op(self, argmap):

        modname = argmap.get_str(('modname', 'original'))
        relmod = importlib.import_module(modname)
        print("Going model hard-coded data with module {}".format(modname))

        # Load the hard-coded sample data
        origdata = utility.get_encoded_sents()

        #enc = utility.get_encoder()
        hparams = utility.get_hparams()

        with tf.Session(graph=tf.Graph()) as sess:

            # Notice!!! You don't need these set-seed operations here!!!
            # np.random.seed(seed)
            # tf.set_random_seed(seed)

            tfop = relmod.model_or_sample(origdata.shape[0], origdata)

            ckpt = tf.train.latest_checkpoint(
                os.path.join('models', utility.MODEL_NAME))
            tf.train.Saver().restore(sess, ckpt)

            alpha = time.time()
            result = sess.run(tfop)
            print("Basic Model successful, took {:.03f} seconds".format(
                time.time() - alpha))

            utility.PickleData(relmod, 'modhcode', 0, result=result).save()
Code Example #4
def model_or_sample(batch_size, modeldata=None):

    length = utility.SAMPLE_LENGTH if modeldata is None else modeldata.shape[1]

    return explicit_loop_bimodel(hparams=utility.get_hparams(),
                                 length=length,
                                 modeldata=modeldata,
                                 start_token=utility.get_start_token()[0],
                                 batch_size=batch_size)
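
Code Example #4 takes the sequence length from modeldata when it is supplied and falls back to utility.SAMPLE_LENGTH otherwise, so the same entry point covers both modeling fixed data (as called from Code Examples #1 and #3) and free-running sampling. Below is a self-contained sketch of just that dispatch, with a stand-in value for utility.SAMPLE_LENGTH, which is not defined on this page.

import numpy as np

SAMPLE_LENGTH = 20  # stand-in for utility.SAMPLE_LENGTH

def pick_length(modeldata=None):
    # Mirrors the length dispatch in model_or_sample: with modeldata, the
    # sequence length comes from its second dimension; otherwise the fixed
    # sampling length is used.
    return SAMPLE_LENGTH if modeldata is None else modeldata.shape[1]

print(pick_length())                                   # 20 -> sampling mode
print(pick_length(np.zeros((3, 37), dtype=np.int32)))  # 37 -> modeling given data
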
Code Example #5
def model_or_sample(batch_size):

    return sample_sequence(hparams=utility.get_hparams(),
                           length=utility.SAMPLE_LENGTH,
                           start_token=utility.get_start_token()[0],
                           batch_size=batch_size)