Example 1
class HttpCmder(BaseCmder):
    _net = None
    _params = None
    _return_data = None

    def __init__(self, url, code):
        super(HttpCmder, self).__init__(code)
        self._net = Network(url)

    def post(self):
        if self._params:
            self._return_data = self._net.open('post', '', self._params)
            self._logger.debug("result: " + self._return_data)

    def get(self):
        if self._params:
            self._return_data = self._net.open('get', '', self._params)
            self._logger.debug("result: " + self._return_data)

    def setParams(self, params):
        self._params = params
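A minimal usage sketch, assuming Network.open(method, path, params) returns the response body as a string and BaseCmder supplies self._logger; the URL, parameters and code value below are hypothetical:

cmder = HttpCmder("https://example.com/api", code=0)   # meaning of code depends on BaseCmder
cmder.setParams({"q": "ping"})
cmder.post()   # the response body is stored in cmder._return_data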
Example 2
    def __init__(self, vocab_words, vocab_tags, hidden_dim=100,
                 word_emb_dim=50, n_word_emb=3,
                 tag_emb_dim=10, n_tag_emb=1):

        self.n_word_emb = n_word_emb
        self.n_tag_emb = n_tag_emb

        word_types = len(vocab_words)
        tag_types = len(vocab_tags)
        embeddings = []

        for i in range(n_word_emb):
            embeddings.append(torch.nn.Embedding(word_types, word_emb_dim))
            torch.nn.init.normal_(embeddings[i].weight, mean=0, std=0.01)
        for i in range(n_word_emb, n_tag_emb + n_word_emb):
            embeddings.append(torch.nn.Embedding(tag_types, tag_emb_dim))
            torch.nn.init.normal_(embeddings[i].weight, mean=0, std=0.01)

        self.model = Network(embeddings, hidden_dim, tag_types)
Example 3
class NeuralParser(Parser):

    def __init__(self, vocab_words, vocab_tags,
                 output_dim=3, hidden_dim=200,
                 word_emb_dim=50, word_features=3,
                 tag_emb_dim=10, tag_features=3):

        self.vocab_words = vocab_words
        self.vocab_tags = vocab_tags

        self.word_features = word_features
        self.tag_features = tag_features

        word_types = len(vocab_words)
        tag_types = len(vocab_tags)
        embeddings = []

        # One embedding table per word-based feature ...
        for i in range(word_features):
            embeddings.append(torch.nn.Embedding(word_types, word_emb_dim))
            torch.nn.init.normal_(embeddings[i].weight, mean=0, std=0.01)
        # ... and one per tag-based feature.
        for i in range(word_features, word_features + tag_features):
            embeddings.append(torch.nn.Embedding(tag_types, tag_emb_dim))
            torch.nn.init.normal_(embeddings[i].weight, mean=0, std=0.01)

        self.model = Network(embeddings, hidden_dim, output_dim)

    def featurize(self, words, tags, config):
        # Features: the next word in the buffer, the top two words on the
        # stack, and their tags (assumes three word and three tag features).
        i, stack, heads = config
        if i < len(words) and words[i] not in self.vocab_words:
            self.vocab_words[words[i]] = 1  # map unseen words to index 1
        feats = torch.zeros(self.word_features + self.tag_features, dtype=torch.long)
        feats[0] = self.vocab_words[words[i]] if i < len(words) else 0
        feats[1] = self.vocab_words[words[stack[-1]]] if len(stack) > 0 else 0
        feats[2] = self.vocab_words[words[stack[-2]]] if len(stack) > 1 else 0
        feats[3] = self.vocab_tags[tags[i]] if i < len(words) else 0
        feats[4] = self.vocab_tags[tags[stack[-1]]] if len(stack) > 0 else 0
        feats[5] = self.vocab_tags[tags[stack[-2]]] if len(stack) > 1 else 0
        return feats

    def predict(self, words, tags):
        # Greedy decoding: repeatedly apply the highest-scoring valid move
        # until the configuration is final.
        config = Parser.initial_config(len(words))
        while not Parser.is_final_config(config):
            valid_moves = Parser.valid_moves(config)
            features = self.featurize(words, tags, config)
            pred_moves = self.model.forward(features)
            best_m_s = [valid_moves[0], pred_moves[valid_moves[0]]]
            for m in valid_moves:
                if pred_moves[m] > best_m_s[1]:
                    best_m_s = [m, pred_moves[m]]
            config = Parser.next_config(config, best_m_s[0])
        return config[2]  # the heads component of the final configuration
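A minimal usage sketch, assuming vocab_words and vocab_tags map strings to integer ids and that the Parser base class supplies the transition-system helpers used above; the sentence is hypothetical:

parser = NeuralParser(vocab_words, vocab_tags)
words = ['<root>', 'she', 'reads', 'books']
tags = ['<root>', 'PRON', 'VERB', 'NOUN']
heads = parser.predict(words, tags)   # one predicted head index per word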
Example 4
class NeuralTagger(Tagger):

    def __init__(self, vocab_words, vocab_tags, hidden_dim=100,
                 word_emb_dim=50, n_word_emb=3,
                 tag_emb_dim=10, n_tag_emb=1):

        self.n_word_emb = n_word_emb
        self.n_tag_emb = n_tag_emb

        word_types = len(vocab_words)
        tag_types = len(vocab_tags)
        embeddings = []

        # One embedding table per word feature ...
        for i in range(n_word_emb):
            embeddings.append(torch.nn.Embedding(word_types, word_emb_dim))
            torch.nn.init.normal_(embeddings[i].weight, mean=0, std=0.01)
        # ... and one per predicted-tag feature.
        for i in range(n_word_emb, n_tag_emb + n_word_emb):
            embeddings.append(torch.nn.Embedding(tag_types, tag_emb_dim))
            torch.nn.init.normal_(embeddings[i].weight, mean=0, std=0.01)

        self.model = Network(embeddings, hidden_dim, tag_types)

    def featurize(self, words, i, pred_tags):
        # Word features: the current word, the previous word and the words
        # following the current position; tag features: the most recently
        # predicted tags. Embedding index 0 acts as padding when a position
        # falls outside the sentence.
        features = torch.zeros(self.n_word_emb + self.n_tag_emb,
                               dtype=torch.long)
        for pos in range(self.n_word_emb):
            if pos == 0:
                features[pos] = words[i]
            elif pos == 1:
                if i != 0:
                    features[pos] = words[i - 1]
            elif i + pos - 1 < len(words):
                features[pos] = words[i + pos - 1]
            else:
                features[pos] = 0

        for pos in range(self.n_tag_emb):
            if i - pos > 0:
                features[self.n_word_emb + pos] = pred_tags[-pos - 1]

        return features

    def predict(self, words):
        # Tag the sentence left to right, feeding previously predicted tags
        # back into featurize() as context.
        pred_tags = []
        for i in range(len(words)):
            features = self.featurize(words, i, pred_tags)
            scores = self.model.forward(features)
            pred_tag = scores.argmax()
            pred_tags.append(pred_tag)

        return pred_tags
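A minimal usage sketch, assuming the sentence is already encoded as word ids drawn from vocab_words (featurize feeds these values straight into the embeddings); the ids are hypothetical:

tagger = NeuralTagger(vocab_words, vocab_tags)
word_ids = [12, 7, 43]                # encoded sentence
tag_ids = tagger.predict(word_ids)    # one predicted tag id per word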
Example 5
    def __init__(self, vocab_words, vocab_tags,
                 output_dim=3, hidden_dim=200,
                 word_emb_dim=50, word_features=3,
                 tag_emb_dim=10, tag_features=3):

        self.vocab_words = vocab_words
        self.vocab_tags = vocab_tags

        self.word_features = word_features
        self.tag_features = tag_features

        word_types = len(vocab_words)
        tag_types = len(vocab_tags)
        embeddings = []

        for i in range(word_features):
            embeddings.append(torch.nn.Embedding(word_types, word_emb_dim))
            torch.nn.init.normal_(embeddings[i].weight, mean=0, std=0.01)
        for i in range(word_features, word_features + tag_features):
            embeddings.append(torch.nn.Embedding(tag_types, tag_emb_dim))
            torch.nn.init.normal_(embeddings[i].weight, mean=0, std=0.01)

        self.model = Network(embeddings, hidden_dim, output_dim)
Example 6
def clone():
    tmp = load_object("seed-9473.save")

    net = Network()

    net.add(FCLayer(30, 30))
    net.add(ActivationLayer(tanh, tanh_prime))
    net.add(FCLayer(30, 30))
    net.add(ActivationLayer(tanh, tanh_prime))
    net.add(FCLayer(30, 30))
    net.add(ActivationLayer(tanh, tanh_prime))
    net.add(FCLayer(30, 2))
    net.add(ActivationLayer(softmax, softmax_prime))

    # Reuse the first seven layers from the saved network; only the new
    # softmax output activation (net.layers[7]) is kept from the build above.
    for i in range(7):
        net.layers[i] = tmp.layers[i]

    save_object(net, 'seed-clone-9473-soft.save')
Example 7
    def __init__(self, url, code):
        super(HttpCmder, self).__init__(code)
        self._net = Network(url)
Example 8
def train(args, df_x, df_y):
    """
    :param args: dict from argparse
    :param df_x: DataFrame containing xs
    :param df_y: DataFrame containing ys
    :return: Network's instance
    """

    df_x_train, df_x_test, df_y_train, df_y_test = train_test_split(
        df_x, df_y, test_size=0.2, random_state=0)
    net = Network()

    # Load a saved seed if requested; otherwise build the network from scratch
    if args.load:
        if os.path.exists(os.path.join(os.getcwd(), args.load)):
            net = load_object(args.load)
        else:
            sys.exit(display_errors_dict('wrong_load'))
    else:
        net.add(FCLayer(30, 30))
        net.add(ActivationLayer(tanh, tanh_prime))
        net.add(FCLayer(30, 30))
        net.add(ActivationLayer(tanh, tanh_prime))
        net.add(FCLayer(30, 30))
        net.add(ActivationLayer(tanh, tanh_prime))
        net.add(FCLayer(30, 2))
        net.add(ActivationLayer(softmax, softmax_prime))

    # Saves seed
    if args.save and not args.load:
        save_object(net, 'saved-seed.save')

    # Trains model
    net.use(mse, mse_prime)
    net.fit(df_x_train,
            df_y_train,
            df_x_test,
            df_y_test,
            epoch=args.epoch,
            learning_rate=args.learning,
            error_rate=0.01)
    plot_loss(net)

    # Evaluate on the held-out 20% split
    rse_acc = evaluate_and_predict(net, df_x_test, df_y_test)

    # Renames seed with accuracy score
    if args.save and not args.load:
        move('saved-seed.save', f'seed-{int(rse_acc * 10000)}.save')

    # Save the trained network object
    save_object(net, 'network.save')

    return net
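A minimal driver sketch, assuming an argparse interface with the --load, --save, --epoch and --learning options referenced above (the defaults shown are illustrative) and that df_x / df_y are feature and label DataFrames loaded elsewhere:

import argparse

arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--load', default=None)
arg_parser.add_argument('--save', action='store_true')
arg_parser.add_argument('--epoch', type=int, default=100)
arg_parser.add_argument('--learning', type=float, default=0.01)
args = arg_parser.parse_args()

net = train(args, df_x, df_y)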