Example #1
def verifier(transmitted_message, g):

    # 1 - Divide the transmitted message by the generator (modulo-2 division via XOR)
    r = XOR(transmitted_message, g)

    # A non-zero remainder means the message was corrupted in transit
    if int(r) > 0:
        verification = "Message is transmitted incorrectly, there is an error."
    else:
        verification = "Message is transmitted correctly."

    return verification

def generator(message, g):

    # 1 - Append len(g) - 1 zeros to the message before dividing
    original_message = message

    appended_zeros_length = len(g) - 1
    message = message + "0" * appended_zeros_length

    # 2 - Divide the padded message by the generator (modulo-2 division via XOR);
    #     the remainder is the check value appended to the message
    r = XOR(message, g)

    print("Part appended to message before transmitting: " + r)

    # Transmit the original message followed by the remainder
    transmitted_message = original_message + r
    return {'transmitted_message': transmitted_message, 'r': r}
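
Both functions rely on an XOR helper that performs modulo-2 (binary) division of one bit string by another and returns the remainder; that helper is not shown in this example. A minimal sketch of what it could look like, assuming both arguments are strings of '0'/'1' characters, is:

def XOR(dividend, divisor):
    # Hypothetical helper (not part of the original example): modulo-2 division
    # of the bit string `dividend` by `divisor`, returning the remainder as a
    # bit string of length len(divisor) - 1.
    remainder = list(dividend[:len(divisor)])
    for i in range(len(divisor), len(dividend) + 1):
        if remainder[0] == '1':
            # XOR the current window with the divisor, bit by bit
            remainder = ['0' if remainder[j] == divisor[j] else '1'
                         for j in range(len(divisor))]
        # Drop the leading bit and bring down the next one, if any remain
        remainder.pop(0)
        if i < len(dividend):
            remainder.append(dividend[i])
    return ''.join(remainder)

With this sketch, generator('100100', '1101') would append the remainder '001', and verifier('100100001', '1101') would report a correct transmission.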
Example #3
    input_indices.append(gate[4])

# Here we create objects from the classes we have
for gate in connected_gates:

    # Each entry presumably holds [input1, input2, output, gtype, ...],
    # where gtype 0 = AND, 1 = OR, 2 = XOR, 3 = NOT
    if gate[3] == 0:
        Gates.append(AND(gate[0], gate[1], gate[2], gate[3]))
    elif gate[3] == 1:
        Gates.append(OR(gate[0], gate[1], gate[2], gate[3]))
    elif gate[3] == 2:
        Gates.append(XOR(gate[0], gate[1], gate[2], gate[3]))
    elif gate[3] == 3:
        # NOT has a single input, so the second field is skipped
        Gates.append(NOT(gate[0], gate[2], gate[3]))

# Here we determine how many inputs we have
gate_index = 0

for index in input_indices:
    if Gates[index].gtype == 3:

        if Gates[index].input1 is None:
            input_counter += 1
            starting_gates.append(gate_index)
            names_of_inputs.append(gate_type[Gates[index].gtype] + " inp1")
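
The AND, OR, XOR and NOT constructors and the gtype / input1 attributes used above come from gate classes that are not shown in this excerpt. A minimal sketch of what such classes could look like, assuming each gate stores its input wire indices, its output wire index and its type code, is:

class Gate:
    # Hypothetical base class (not from the original example): stores the
    # wiring of a single gate in the circuit.
    def __init__(self, input1, input2, output, gtype):
        self.input1 = input1
        self.input2 = input2
        self.output = output
        self.gtype = gtype

class AND(Gate):
    def evaluate(self, a, b):
        return a & b

class OR(Gate):
    def evaluate(self, a, b):
        return a | b

class XOR(Gate):
    def evaluate(self, a, b):
        return a ^ b

class NOT(Gate):
    # A NOT gate has a single input, matching the shorter constructor call above
    def __init__(self, input1, output, gtype):
        super().__init__(input1, None, output, gtype)

    def evaluate(self, a):
        return 1 - a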
Example #4
def halfAdder(x1, x2):
    c = AND(x1, x2)
    s = XOR(x1, x2)
    return s, c  # return (sum, carry-out)
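
# The AND and XOR calls above are bit-level helpers that are not shown in this
# excerpt. A minimal sketch, assuming single-bit integer inputs (0 or 1):
def AND(a, b):
    return a & b

def XOR(a, b):
    return a ^ b

# With these helpers, halfAdder(1, 1) returns (0, 1): sum 0 with a carry of 1.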
"""
class RnnRbm(RnnRbm):
    def __init__(self, n_hidden=150, n_hidden_recurrent=100, lr=0.001, dt=0.3):
        v, v_sample, cost, monitor, params, updates_train, v_t, updates_generate = build_rnnrbm(2, n_hidden, n_hidden_recurrent)
        self.dt = dt
        gradient = T.grad(cost, params, consider_constant=[v_sample])
        updates_train.update(((p, p - lr * g) for p, g in zip(params, gradient)))
        self.train_function = theano.function([v], monitor, updates=updates_train)
        self.generate_function = theano.function([], v_t, updates=updates_generate)
    def train(self, files, batch_size=100, num_epochs=200):

"""
###############################
####  Setting for dataset  ####
###############################
dataset = XOR(size=30, type='seq')
train_data = dataset.get_batch_design(10)
print(len(train_data))

########################
####  Pre training  ####
########################
### First layer ###

rbm = RnnRbm(nvis=2, nhid=4, nhid_recurrent=3)
# rbm = RnnRbm(n_hidden=4, n_hidden_recurrent=3)
print('')

# for param in rbm.get_params():
#     print '------    ' + str(param) + '    -----'
#     print param.get_value()
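
To inspect the model parameters, the commented-out loop above can be enabled; a version with parenthesised print calls, assuming get_params() returns Theano shared variables with a get_value() method as the comment suggests, would be:

for param in rbm.get_params():
    print('------    ' + str(param) + '    -----')
    print(param.get_value())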