def verifier(transmitted_message, g):
    # 1- Doing XOR between transmitted message and generator
    r = XOR(transmitted_message, g)
    if int(r) > 0:
        verification = "Message is transmitted incorrectly, there is an error."
    else:
        verification = "Message is transmitted correctly."
    return verification
def generator(message, g):
    # 1- Adding necessary zeros to the message
    original_message = message
    appendedZerosLength = len(g) - 1
    for x in range(appendedZerosLength):
        message = message + "0"
    # 2- Doing XOR between message and generator
    r = XOR(message, g)
    print("Part appended to message before transmitting: " + r)
    transmittedMessage = original_message + r
    return {'transmitted_message': transmittedMessage, 'r': r}
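Both CRC routines above call an XOR helper that is not shown in the snippet. Below is a minimal sketch of such a helper, assuming it performs modulo-2 (CRC) division on bit strings and returns the remainder; the name and details are assumptions, not the project's actual implementation.

def XOR(dividend, divisor):
    # Modulo-2 (CRC) division: repeatedly XOR the divisor into the dividend
    # and return the remainder as a bit string of length len(divisor) - 1.
    remainder = list(dividend[:len(divisor)])
    for i in range(len(divisor), len(dividend) + 1):
        if remainder[0] == '1':
            # XOR the current window with the divisor, bit by bit
            remainder = ['0' if remainder[j] == divisor[j] else '1'
                         for j in range(len(divisor))]
        # Drop the leading bit and bring down the next bit, if any remain
        remainder.pop(0)
        if i < len(dividend):
            remainder.append(dividend[i])
    return ''.join(remainder)

With this helper, generator("100100", "1101") appends the remainder "001", and verifier("100100001", "1101") then reports that the message was transmitted correctly.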
input_indices.append(gate[4])

# Here we create objects from the gate classes we have
for i in range(len(connected_gates)):
    if connected_gates[i][3] == 0:
        Gates.append(
            AND(connected_gates[i][0], connected_gates[i][1], connected_gates[i][2], connected_gates[i][3]))
    elif connected_gates[i][3] == 1:
        Gates.append(
            OR(connected_gates[i][0], connected_gates[i][1], connected_gates[i][2], connected_gates[i][3]))
    elif connected_gates[i][3] == 2:
        Gates.append(
            XOR(connected_gates[i][0], connected_gates[i][1], connected_gates[i][2], connected_gates[i][3]))
    elif connected_gates[i][3] == 3:
        Gates.append(
            NOT(connected_gates[i][0], connected_gates[i][2], connected_gates[i][3]))

# Here we determine how many inputs we have
gate_index = 0
for index in input_indices:
    if Gates[index].gtype == 3:
        if Gates[index].input1 is None:
            input_counter += 1
            starting_gates.append(gate_index)
            names_of_inputs.append(gate_type[Gates[index].gtype] + " inp1")
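The loop above assumes gate classes whose constructors take the gate's input indices, output index, and numeric type code (0 = AND, 1 = OR, 2 = XOR, 3 = NOT) and expose them as attributes. The following is a hypothetical minimal stand-in for two of those classes; the attribute names match the code above, but the classes themselves are not the project's real implementation.

class XOR:
    def __init__(self, input1, input2, output, gtype):
        # Record wiring only; evaluation logic would live elsewhere
        self.input1 = input1
        self.input2 = input2
        self.output = output
        self.gtype = gtype

class NOT:
    def __init__(self, input1, output, gtype):
        # NOT takes a single input, so there is no input2
        self.input1 = input1
        self.output = output
        self.gtype = gtype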
import sys
sys.path.append('/home/fujikawa/lib/python/other/pylearn2/pylearn2')

from pylearn2.models import mlp
from pylearn2.training_algorithms import sgd
from pylearn2.termination_criteria import EpochCounter
from pylearn2.train import Train
import numpy as np
from pylearn2.datasets.transformer_dataset import TransformerDataset
from pylearn2.models.autoencoder import DenoisingAutoencoder, HigherOrderContractiveAutoencoder
from pylearn2.corruption import BinomialCorruptor
import pylearn2.costs.autoencoder as cost_ae
from XOR import XOR

###############################
#### Setting for dataset ####
###############################
dataset = XOR()

########################
#### Pre training ####
########################

### First layer ###
dA_1 = DenoisingAutoencoder(BinomialCorruptor(corruption_level=.2), 2, 2,
                            act_enc='sigmoid', act_dec='linear', tied_weights=False)
train_1 = Train(
    dataset,
    dA_1,
    algorithm=sgd.SGD(learning_rate=.05, batch_size=10,
                      termination_criterion=EpochCounter(30),
                      cost=cost_ae.MeanSquaredReconstructionError(),
                      monitoring_batches=5,
                      monitoring_dataset=dataset)
)
train_1.main_loop()
dA_1_out = TransformerDataset(dataset, dA_1)
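The pylearn2 scripts in this section import their training data via from XOR import XOR, a module that is not shown here. The following is a hypothetical minimal stand-in, assuming the dataset wraps the four XOR patterns in a DenseDesignMatrix with one-hot targets; the project's real XOR.py evidently does more (another snippet below passes size and type arguments for a sequence variant).

# Hypothetical stand-in for the XOR dataset module imported above.
import numpy as np
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix

class XOR(DenseDesignMatrix):
    def __init__(self):
        # Four XOR input patterns with one-hot targets for a 2-way output layer
        X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype='float32')
        labels = np.array([0, 1, 1, 0])
        y = np.zeros((4, 2), dtype='float32')
        y[np.arange(4), labels] = 1.
        super(XOR, self).__init__(X=X, y=y)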
""" class RnnRbm(RnnRbm): def __init__(self, n_hidden=150, n_hidden_recurrent=100, lr=0.001, dt=0.3): v, v_sample, cost, monitor, params, updates_train, v_t,updates_generate = build_rnnrbm(2, n_hidden, n_hidden_recurrent) self.dt = dt gradient = T.grad(cost, params, consider_constant=[v_sample]) updates_train.update(((p, p - lr * g) for p, g in zip(params, gradient))) self.train_function = theano.function([v], monitor, updates=updates_train) self.generate_function = theano.function([], v_t, updates=updates_generate) def train(self, files, batch_size=100, num_epochs=200): """ ############################### #### Setting for dataset #### ############################### dataset = XOR(size=30, type='seq') train_data = dataset.get_batch_design(10) print len(train_data) ######################## #### Pre training #### ######################## ### First layer ### rbm = RnnRbm(nvis=2, nhid=4, nhid_recurrent=3) # rbm = RnnRbm(n_hidden=4, n_hidden_recurrent=3) print '' # for param in rbm.get_params(): # print '------ ' + str(param) + ' -----' # print param.get_value()
def halfAdder(x1, x2):
    c = AND(x1, x2)
    s = XOR(x1, x2)
    return s, c  # return sum, carry-out
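As a usage example, two half adders can be chained into a full adder. This sketch assumes an OR gate function exists alongside the AND and XOR used above.

def fullAdder(x1, x2, cin):
    # First stage adds the two input bits, second stage adds the carry-in;
    # the carry-out is high if either stage produced a carry.
    s1, c1 = halfAdder(x1, x2)
    s, c2 = halfAdder(s1, cin)
    cout = OR(c1, c2)
    return s, cout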
# coding: utf-8
import theano, sys
sys.path.append('/home/fujikawa/lib/python/other/pylearn2/pylearn2')

from pylearn2.models import mlp
from pylearn2.training_algorithms import sgd
from pylearn2.termination_criteria import EpochCounter
from pylearn2.train import Train
import numpy as np
from XOR import XOR

###############################
#### Setting for dataset ####
###############################
dataset = XOR()

##########################
#### Setting for NN ####
##########################
# create layers
hidden_layer = mlp.Sigmoid(layer_name='hidden', dim=3, irange=.1, init_bias=1.)
output_layer = mlp.Softmax(2, 'output', irange=.1)
layers = [hidden_layer, output_layer]
model = mlp.MLP(layers, nvis=2)

####################
#### Training ####
####################
train = Train(
    dataset,
    model,
    algorithm=sgd.SGD(learning_rate=.05, batch_size=10,
                      termination_criterion=EpochCounter(400))
)
def initXor(self):
    # Create and show the XOR application window, parented to this widget
    new_window = XOR.XORApp(self)
    new_window.show()