def trainData(setObj,classes,trainingset,class_bags,hidden_neurons=10, alpha=1, epochs=50000,dropout_percent=0.5):
    """Train a one-hidden-layer neural network with full-batch gradient descent.

    Parameters
    ----------
    setObj : unused here; kept so existing callers keep working.
    classes : list of class labels; its length fixes the output layer width.
    trainingset : list of input bag-of-words vectors, one per training sample.
    class_bags : list of target (one-hot) vectors aligned with ``trainingset``.
    hidden_neurons : width of the hidden layer.
    alpha : learning rate applied to each weight update.
    epochs : maximum number of training iterations.
    dropout_percent : unused here; kept for interface compatibility.
        TODO(review): implement dropout or retire the parameter.

    Returns
    -------
    tuple
        ``(synapse0, synapse1)`` — the two weight matrices converted to
        plain nested lists (e.g. for JSON serialization).
    """
    # Fixed seed so weight initialization (and thus training) is reproducible.
    numpy.random.seed(1)

    # Sentinel: mean absolute error starts at 1, the maximum possible for
    # sigmoid outputs against 0/1 targets, so the first epoch always improves.
    last_mean_error = 1
    # Randomly initialize weights in [-1, 1) with mean 0.
    synapse_0 = 2 * numpy.random.random((len(trainingset[0]), hidden_neurons)) - 1
    synapse_1 = 2 * numpy.random.random((hidden_neurons, len(classes))) - 1

    prev_synapse_0_weight_update = numpy.zeros_like(synapse_0)
    prev_synapse_1_weight_update = numpy.zeros_like(synapse_1)
    # Per-weight count of gradient sign flips (oscillation diagnostic).
    # The original initialized these twice; the duplicate block was removed.
    synapse_0_direction_count = numpy.zeros_like(synapse_0)
    synapse_1_direction_count = numpy.zeros_like(synapse_1)

    for j in range(epochs + 1):
        # Forward pass over the entire training set (full batch).
        layer_0 = numpy.array(trainingset)
        layer_1 = sigmoid(numpy.dot(layer_0, synapse_0))
        layer_2 = sigmoid(numpy.dot(layer_1, synapse_1))

        layer_2_error = numpy.array(class_bags) - layer_2
        # Early stopping: bail out as soon as mean error stops improving.
        mean_error = numpy.mean(numpy.abs(layer_2_error))
        if mean_error < last_mean_error:
            last_mean_error = mean_error
        else:
            break

        # Backpropagation: scale each layer's error by its sigmoid gradient,
        # so confident (saturated) units receive smaller corrections.
        layer_2_delta = layer_2_error * derivative(layer_2)
        # How much each hidden unit contributed to the output error.
        layer_1_error = layer_2_delta.dot(synapse_1.transpose())
        layer_1_delta = layer_1_error * derivative(layer_1)

        synapse_1_weight_update = layer_1.T.dot(layer_2_delta)
        synapse_0_weight_update = layer_0.T.dot(layer_1_delta)

        if j > 0:
            # Accumulate how often each weight's update changed sign versus
            # the previous epoch (1 where the sign flipped, 0 otherwise).
            synapse_0_direction_count += numpy.abs(
                ((synapse_0_weight_update > 0) + 0) - ((prev_synapse_0_weight_update > 0) + 0))
            synapse_1_direction_count += numpy.abs(
                ((synapse_1_weight_update > 0) + 0) - ((prev_synapse_1_weight_update > 0) + 0))

        # Plain gradient-descent step.
        synapse_1 += alpha * synapse_1_weight_update
        synapse_0 += alpha * synapse_0_weight_update

        prev_synapse_0_weight_update = synapse_0_weight_update
        prev_synapse_1_weight_update = synapse_1_weight_update

    # Return plain nested lists so the weights are easy to persist.
    return synapse_0.tolist(), synapse_1.tolist()
def think(sentence, layer, parent, synapse_0, synapse_1):
    """Run a forward pass of the trained network on a single sentence.

    Parameters
    ----------
    sentence : raw input text; lower-cased before being converted to a
        bag-of-words vector.
    layer, parent : identify which stored record to load via
        ``storeSynapes.read``; element ``[2]`` of that record is passed to
        ``give_word_bag`` (presumably the vocabulary — TODO confirm).
    synapse_0 : input -> hidden weight matrix.
    synapse_1 : hidden -> output weight matrix.

    Returns
    -------
    The output-layer activations (one sigmoid score per class).
    """
    # give_word_bag returns a sequence; element 0 is the feature vector.
    bag = give_word_bag(sentence.lower(), (storeSynapes.read(layer, parent))[2])
    l0 = numpy.array(bag[0])
    # Hidden-layer activations. (The original built l0 but then dotted the
    # raw list instead; feeding l0 is numerically identical and removes the
    # dead local.)
    l1 = sigmoid(numpy.dot(l0, synapse_0))
    # Output-layer activations.
    l2 = sigmoid(numpy.dot(l1, synapse_1))
    return l2