Example #1
# Requires NumPy; CrossEntropy, max_error_label, combiner_matrix and the
# `cpu` package are assumed to be importable from the surrounding project.
import numpy as np


def get_sentence_importance_scores(embedding_model, logistic_regression, x):
    objective = CrossEntropy()

    # Flatten the list of tokenized sentences into a single word sequence.
    x_combined = [w for s in x for w in s]

    # Describe the input layout: one batch element ('b') of len(x_combined) words ('w').
    meta_combined = {
        'lengths': np.asarray([len(x_combined)]),
        'space_below': cpu.space.CPUSpace(
            axes=('b', 'w'),
            extents={'b': 1, 'w': len(x_combined)}),
    }

    x_combined = np.asarray(x_combined).reshape((1, -1))

    # Forward pass: words -> embeddings -> class probabilities.
    embeddings, embeddings_meta, embeddings_state = embedding_model.fprop(
        x_combined, meta=dict(meta_combined), return_state=True)
    embeddings_meta['space_below'] = embeddings_meta['space_above']
    y_hat, y_hat_meta, log_reg_state = logistic_regression.fprop(
        embeddings, meta=dict(embeddings_meta), return_state=True)
    y_hat_meta['space_below'] = y_hat_meta['space_above']

    # Evaluate the loss against the error-maximizing label, then backpropagate
    # the resulting delta down to the embedding layer.
    loss, loss_meta, loss_state = objective.fprop(
        y_hat, max_error_label(y_hat), meta=dict(y_hat_meta))

    delta, delta_meta = objective.bprop(
        y_hat, max_error_label(y_hat), meta=dict(loss_meta), fprop_state=loss_state)

    delta = logistic_regression.bprop(
        delta, meta=dict(delta_meta), fprop_state=log_reg_state)

    # Pool the word-level delta and embeddings into per-sentence quantities.
    C = combiner_matrix([len(s) for s in x])

    sentence_delta = np.dot(delta, C)
    sentence_embedding = np.dot(embeddings, C)

    # Normalize for cosine distance.
    sentence_delta /= np.sqrt(np.sum(sentence_delta**2, axis=1, keepdims=True))
    sentence_embedding /= np.sqrt(np.sum(sentence_embedding**2, axis=1, keepdims=True))

    # Importance of a sentence = |cosine| between its pooled embedding and
    # the backpropagated error signal.
    sentence_importance_scores = np.abs(np.sum(sentence_delta * sentence_embedding, axis=0))

    return sentence_importance_scores
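The helpers max_error_label and combiner_matrix are defined elsewhere in the project. The following is a minimal sketch of what they plausibly do, inferred only from how they are used above; the actual implementations may differ, for example in the array shapes they expect.

# Hypothetical sketches, inferred from the usage above; not the project's
# actual implementations.
import numpy as np

def max_error_label(y_hat):
    # One-hot label on each row's least probable class, so the cross-entropy
    # against y_hat (and hence the backpropagated delta) is as large as possible.
    label = np.zeros_like(y_hat)
    label[np.arange(y_hat.shape[0]), np.argmin(y_hat, axis=1)] = 1.0
    return label

def combiner_matrix(sentence_lengths):
    # 0/1 pooling matrix with one column per sentence; column j selects the
    # word positions belonging to sentence j, assuming those positions run
    # along the axis that is contracted in np.dot(delta, C).
    sentence_lengths = list(sentence_lengths)
    C = np.zeros((sum(sentence_lengths), len(sentence_lengths)))
    start = 0
    for j, n in enumerate(sentence_lengths):
        C[start:start + n, j] = 1.0
        start += n
    return C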
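A hypothetical call might look like the following; trained_embedding_model and trained_classifier are stand-ins for already-trained model objects exposing the fprop/bprop interface used above, and each sentence is a list of word indices.

# Hypothetical usage; the model objects and word indices are placeholders.
x = [
    [12, 7, 431, 9],       # sentence 1
    [88, 3, 15],           # sentence 2
    [4, 902, 17, 6, 250],  # sentence 3
]
scores = get_sentence_importance_scores(trained_embedding_model, trained_classifier, x)
print(scores)  # one score per sentence; higher = more influential on the prediction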
def get_model_output(model, X, Y):
    # Initialize the data provider
    data_provider = cpu.optimize.data_provider.LabelledSequenceBatchProvider(
        X=X, Y=Y, padding='PADDING')

    # Define the cost function
    cEntr = CrossEntropy()

    # Get a batch of data and use the model to predict
    X, Y, meta = data_provider.next_batch()
    Y_hat, meta, model_state = model.fprop(X, meta=meta, return_state=True)

    # Create a Y that maximizes the error of the model
    Y_inverted = enforce_error(Y_hat)

    # Bookkeep the spaces and backpropagate to get the deltas
    meta['space_below'] = meta['space_above']
    cost, meta, cost_state = cEntr.fprop(Y_hat, Y_inverted, meta=meta)
    delta, meta = cEntr.bprop(Y_hat, Y_inverted, meta=meta, fprop_state=cost_state)
    delta, meta = model.bprop(
        delta, meta=meta, fprop_state=model_state, return_state=True, num_layers=-1)
    delta, space = meta['space_below'].transform(delta, ('b', 'w'))

    return Y_hat, Y_inverted, delta
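Here enforce_error plays the same role as max_error_label in the first example: it builds labels the model gets maximally wrong, so the backpropagated delta carries a strong error signal. A hypothetical call, with trained_model, X_test and Y_test standing in for a trained model and its evaluation data:

# Hypothetical usage; trained_model, X_test and Y_test are placeholders for
# a trained model and its labelled evaluation sequences.
Y_hat, Y_inverted, delta = get_model_output(trained_model, X_test, Y_test)
# Y_hat: model predictions, Y_inverted: error-maximizing labels,
# delta: backpropagated error signal, laid out by the ('b', 'w') transform above.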