Example #1
0
def load_ratnet(data_dir, model_dir):
    """Restore the pretrained 'ratnet' review model and its text encoding.

    Reads the cached training reviews from ``data_dir``, rebuilds the
    one-hot character encoding from them (so it matches the encoding the
    model was trained against), then loads the saved parameters from
    ``model_dir`` and compiles the generation/scoring methods.

    Returns a ``(ratnet, text_encoding)`` tuple.
    """
    logging.info("Loading data from cache...")
    with open(data_dir / 'beer_core-train.pkl', 'rb') as handle:
        reviews, _beers = pickle.load(handle)

    # Fixed seed so the shuffle is reproducible across runs.
    random.seed(1337)
    random.shuffle(reviews)

    logging.info("Loading text sequences...")
    char_seqs = []
    for review in reviews:
        char_seqs.append(CharacterSequence.from_string(review.text))

    text_encoding = OneHotEncoding(include_start_token=True,
                                   include_stop_token=True)
    text_encoding.build_encoding(char_seqs)

    aux_encoding = IdentityEncoding(1)

    logging.info("Loading model...")
    # Input width = one-hot characters plus the auxiliary conditioning slot.
    n_input = len(text_encoding) + len(aux_encoding)
    ratnet = CharacterRNN('ratnet', n_input, len(text_encoding),
                          n_layers=2, n_hidden=1024)
    ratnet.load_parameters(model_dir / 'ratnet1_2-1024.pkl')
    ratnet.compile_method('generate_with_concat')
    ratnet.compile_method('log_probability')
    return ratnet, text_encoding
Example #2
0
# Load the cached "top beers" training split: review objects plus beer metadata.
with open('data/beer/beer_top-train.pkl', 'rb') as fp:
    reviews, beers = pickle.load(fp)

# For every review: its text as a character sequence, and its beer style
# wrapped as a one-element category sequence.
text_sequences = []
beer_cats = []
for review in reviews:
    text_sequences.append(CharacterSequence.from_string(review.text))
    beer_cats.append(SingletonSequence(review.beer.style))

review_num_seqs = [seq.encode(text_encoding) for seq in text_sequences]

# Flatten every encoded review into one long number stream; build a parallel
# stream that repeats each review's encoded style once per character, so the
# two streams stay aligned position-for-position.
num_seq = NumberSequence(np.concatenate([s.seq for s in review_num_seqs]))
replicated_cats = [
    cat.encode(cat_encoding).replicate(len(encoded)).seq
    for cat, encoded in zip(beer_cats, review_num_seqs)
]
beer_seq = NumberSequence(np.concatenate(replicated_cats))

batcher = WindowedBatcher([num_seq, beer_seq],
                          [text_encoding, cat_encoding],
                          sequence_length=200,
                          batch_size=256)

# Category-conditioned character RNN: input is one-hot characters plus the
# category encoding; output distribution is over characters only.
catnet = CharacterRNN('2pac',
                      len(text_encoding) + len(cat_encoding),
                      len(text_encoding),
                      n_layers=2,
                      n_hidden=1024)
catnet.compile_method("generate_with_concat")

def load_charnet():
    catnet.load_parameters('models/charnet-top_2-1024-2.pkl')
    layer = catnet.lstm.input_layer

    weights = {
        'W_ix': layer.get_parameter_value("W_ix"),
        'W_ox': layer.get_parameter_value("W_ox"),
        'W_fx': layer.get_parameter_value("W_fx"),
        'W_gx': layer.get_parameter_value("W_gx"),
    }

    for w, value in weights.items():
        layer.set_parameter_value(w, np.vstack([value,
Example #3
0
        c.replicate(len(r)).seq for c, r in zip(beer_ratings, review_num_seqs)
    ]))
# Batch the concatenated character stream alongside the replicated per-review
# conditioning stream.
# NOTE(review): earlier variants batched num_seq alone, or paired the text
# encoding with a style_encoding instead of identity_encoding, e.g.
#   WindowedBatcher(num_seq, [text_encoding, style_encoding], sequence_length=200, batch_size=500)
#   WindowedBatcher(num_seq, [text_encoding], sequence_length=200, batch_size=500)
batcher = WindowedBatcher(
    [num_seq, beer_seq],
    [text_encoding, identity_encoding],
    sequence_length=200,
    batch_size=500,
)

# presumably the character -> index table of the encoding — TODO confirm
D = text_encoding.index

# Conditioned character-level RNN: input width is one-hot characters plus the
# identity-encoded conditioning slot; output is over characters only.
# NOTE(review): previous runs used len(text_encoding) + len(style_encoding)
# as the input width, or plain len(text_encoding) with n_hidden=1024.
charrnn = CharacterRNN(
    '2pac',
    len(text_encoding) + len(identity_encoding),
    len(text_encoding),
    n_layers=2,
    n_hidden=512,
)
# charrnn.compile_method('generate')

# Optimizer candidates kept from earlier experiments:
#   SGD(charrnn), RMSProp(charrnn), Momentum(charrnn)


def train(optimizer, n_iterations, *args):
    state = None
    for i in xrange(n_iterations):
        X, y = batcher.next_batch()
        if state is None:
            state = np.zeros((X.shape[1], charrnn.n_layers, charrnn.n_hidden))