class Processor:
    """
    Represents the processor. Most work is delegated to other
    objects that the processor references. The processor holds the
    values valP, valE, valA, valB, and valC, as well as rA and rB.
    """

    def __init__(self, mem_size: int = 10000):
        """
                The following is an abstraction for a bank of values
                such as valA, which will be used during each cycle.
                It's set up as an object to avoid circular import.
        """
        self.ValBank = ValBank()
        """
        The following are functional units like memory,
        registers, or flags
        """
        self.Memory = Memory(mem_size)
        self.RegisterBank = RegisterBank()
        self.ZF = CCFlag("ZF")  # zero flag
        self.OF = CCFlag("OF")  # overflow flag
        self.SF = CCFlag("SF")  # sign flag
        self.ErrorFlag = StateFlag("Error Flag", error_lib)
        self.StateFlag = StateFlag("State Flag", state_lib)
        self.ALU = ALU(self.ValBank, self.StateFlag, self.ErrorFlag, self.SF, self.OF, self.ZF)
        """
        The following are functional abstractions of operations
        that the processor performs
        """
        self.Fetcher = Fetcher(self.ValBank, self.RegisterBank, self.Memory, self.StateFlag, self.ErrorFlag)
        self.Decoder = Decoder(self.ValBank, self.RegisterBank, self.Memory)
        self.Executor = Executor(self.ValBank, self.ALU, self.OF, self.ZF, self.SF)
        self.Memorizer = Memorizer(self.ValBank, self.Memory)
        self.RegWriter = RegWriter(self.RegisterBank, self.ValBank)
        self.PCUpdater = PCUpdater(self.RegisterBank, self.ValBank)

    def run(self):
        """
        This is the only necessary public method for the processor.
        Currently the way to operate this is by manually loading values
        into the memory using the place_instruction method, then calling
        the 'run' method for the processor. Afterwards calling print
        on the memory object will reveal the finish state of the processor.
        """
        while self.StateFlag.State == 0:
            self.Fetcher.fetch()
            self.Decoder.decode()
            self.Executor.execute()
            self.Memorizer.memory_write()
            self.RegWriter.write_back()
            self.PCUpdater.update_pc()
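
A minimal usage sketch, assuming the place_instruction loader mentioned in the docstring; its exact signature and the instruction encoding are not shown in this example, so the values below are hypothetical:

# Hypothetical usage; place_instruction's signature and the encoded
# instruction string are assumptions, not shown in this example.
proc = Processor(mem_size=10000)
proc.Memory.place_instruction(0, "30f20a00000000000000")  # load at address 0
proc.run()
print(proc.Memory)  # inspect the finished state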
Example #2
import os

from PIL import Image


def test_folder(folder):
    """ Test all images inside a folder.

        Uses the zbar library to test each image.

    Args:
        folder:  The path of your target folder

    Returns:
        (succ, fail, rate):  The number of successes, the number of
        failures, and the "success rate" = succ / (succ + fail)

    """
    def is_img(path):
        # Add more extensions if you need them
        img_ext = ['jpg', 'png', 'bmp']
        return path.split('.')[-1].lower() in img_ext

    dc = Decoder()
    img_list = []
    for root, folders, files in os.walk(folder):
        # Accumulate across all subfolders instead of overwriting
        # the list on every iteration.
        img_list += [os.path.join(root, file) for file in files if is_img(file)]

    succ = fail = 0
    for img in img_list:
        pil = Image.open(img).convert('L')  # convert to grayscale
        code = dc.decode(pil)
        if len(code) > 0:
            succ += 1
        else:
            fail += 1
    rate = float(succ) / (succ + fail) if succ + fail else 0.0
    return (succ, fail, rate)
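
A short usage sketch; the folder path below is a placeholder:

# Hypothetical invocation with a placeholder path.
succ, fail, rate = test_folder('./qr_samples')
print('decoded %d of %d images (%.1f%%)' % (succ, succ + fail, rate * 100))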
Example #3

    def aptidao(self, cromossomo):
        """
        Transforms the random keys into binaries, checks the
        constraints, and returns the cost to be minimized.
        """
        # sqrt comes from math (from math import sqrt)
        decodificacao = Decoder(self.custos, sqrt(self.TAM_CROM))
        Z = decodificacao.decode(cromossomo)
        return Z
Example #4
import numpy as np


def improved_process(P, codes, channels):
    # Group every n consecutive codewords into one longer code.
    improved_codes = []
    n = P.shape[1] // 3
    for c in range(len(codes) // n):
        improved_codes.append(
            np.concatenate([codes[n * c + k] for k in range(n)]))

    # Encoding
    improved_encoder = Encoder(P)
    encodes = [improved_encoder.encode(code) for code in improved_codes]

    # Channeling: pass every encoded block through each channel.
    outputs = [None] * len(channels)
    for c in range(len(channels)):
        outputs[c] = np.array(
            [channels[c].add_noise(code) for code in encodes])

    # Decoding
    improved_decoder = Decoder(P, n + 1)
    for c in range(len(channels)):
        outputs[c] = np.array(
            [improved_decoder.decode(code) for code in outputs[c]])

    return outputs
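
A hedged invocation sketch; Encoder, Decoder, and Channel are this project's classes (used the same way in Example #12 below), so the concrete matrix and codes are illustrative only:

# Illustrative setup only: assumes P is the parity part of a systematic
# code and Channel(p) is the noisy channel used elsewhere on this page.
P = np.matrix([[1, 1, 0], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
codes = [np.array([1, 0, 1, 0]), np.array([0, 1, 1, 0])]
channels = [Channel(0.1), Channel(0.3)]
outputs = improved_process(P, codes, channels)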
Example #5
def decodeRaw(inputStream, outputStream, intervalLength, options):
    Coder.checkInterval(intervalLength)
    decoder = Decoder(inputStream, outputStream, options)
    amountProcessed = 0
    while not decoder.decode(intervalLength):
        amountProcessed += intervalLength
    return amountProcessed  # report how much input was processed
Example #6

    def decode(self, tagnumber):
        # Create a Decoder and return the string for the given binary tag number.
        decoder = Decoder(self._string, self._probrange)
        return decoder.decode(tagnumber)
Example #7
# (Excerpt begins inside a loop that parses a '#'-terminated hexadecimal
# length header from the stream.)
                else:
                    if s.decode() == '#':
                        length_error = int(length_error, 16)
                        break
                    else:
                        length_error += s.decode()

            error = test.read(length_error)
            length_read = tmp.write(lzma.decompress(test.read(length_base)))
            tmp.write(lzma.decompress(test.read()))

        os.remove(base_path + "/tmp0")

        input("Modify the edit0 file in {0}".format(base_path))

        with open(base_path + "/edit0", "rb") as tmp, open(base_path + "/tmp0", "wb") as test:
            # Rebuild the header: hex(base length) '#' hex(error length) '#'
            test.write(hex(length_base).encode())
            test.write("#".encode())
            test.write(hex(length_error).encode())
            test.write("#".encode())

            test.write(error)
            test.write(lzma.compress(tmp.read(length_read)))
            test.write(lzma.compress(tmp.read()))

        os.remove(base_path + "/edit0")

    print("Rebuilding the file")
    decoder.decode()
    print("Rebuild complete.")
Example #8
import threading
import time


class Serialcom(threading.Thread):

	def __init__(self):
		threading.Thread.__init__(self)
		self.shutdown_flag = threading.Event()
		self.motor = Motorcontroller()
		self.buzzer = Buzzer()
		self.xbee = Xbee()
		self.decoder = Decoder()
		self.servo1 = 96
		self.servo2 = 75
		self.joycalc = Joystick()
		self.motor.setServo1(self.servo1)
		self.motor.setServo2(self.servo2)
		self.lastSavedTime = 0


	def run(self):
		print('Thread #%s started' % self.ident)
		self.motor.timeout(1)
		while not self.shutdown_flag.is_set():
			rcvdata = self.xbee.read()
			self.decoder.decode(rcvdata)
			self.motor.recalCommand()
			currenttime = time.time()
			if currenttime - self.lastSavedTime > 1.0:
				self.lastSavedTime = time.time()
				self.xbee.sendBat(self.decoder.getRfrating())
			if self.decoder.getStatus() and self.decoder.checkCRC():
				if self.decoder.getJoyStickPB1() == 0:
					self.motor.EmergyStop()
					self.buzzer.beep(300)

				elif self.decoder.getJoystickM1() > 248 and self.decoder.getJoystickM2() > 248:
					self.joycalc.calculateReg(255)
					self.motor.Motor1MC2(255 - self.joycalc.cor1)
					self.motor.Motor2MC2(255 - self.joycalc.cor2)

				elif (abs(self.decoder.getJoystickM1() - self.decoder.getJoystickM2()) <= 3) and (self.decoder.getJoystickM1() > 50):
					self.joycalc.calculateReg(self.decoder.getJoystickM1())
					self.motor.Motor1MC2(self.decoder.getJoystickM1() - self.joycalc.cor1)
					self.motor.Motor2MC2(self.decoder.getJoystickM1() - self.joycalc.cor2)
					#print "drive forward without full speed"
				else:
					self.motor.Motor1MC2(self.decoder.getJoystickM1())
					self.motor.Motor2MC2(self.decoder.getJoystickM2())
					#print "other speeds"

				if self.decoder.getJoystickPB2() == 0:
					self.servo1 = 96
					self.motor.setServo1(self.servo1)
					self.buzzer.beep(300)

				elif self.decoder.getJoystickVRX2() > 1000:
					if(self.servo1 > 0):
						self.servo1 = self.servo1 - 1
						self.motor.setServo1(self.servo1)
				elif self.decoder.getJoystickVRX2() < 24:
					if(self.servo1 < 180):
						self.servo1 = self.servo1 + 1
						self.motor.setServo1(self.servo1)

				if self.decoder.getJoystickPB2() == 0:
					self.servo2 = 75
					self.motor.setServo2(self.servo2)

				elif self.decoder.getJoystickVRY2() > 1000:
					if(self.servo2 > 0):
						self.servo2 = self.servo2 - 1
						self.motor.setServo2(self.servo2)
				elif self.decoder.getJoystickVRY2() < 24:
					if(self.servo2 < 180):
						self.servo2 = self.servo2 + 1
						self.motor.setServo2(self.servo2)

			time.sleep(0.001)

		# ... Clean shutdown code here ...
		self.xbee.close()
		self.motor.close()
		print('Thread #%s stopped' % self.ident)
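
A minimal sketch of driving this thread; it assumes the hardware wrappers used above (Motorcontroller, Buzzer, Xbee, Decoder, Joystick) are importable in this project:

# Hypothetical wiring; relies on the hardware wrapper classes above.
com = Serialcom()
com.start()
try:
	while com.is_alive():
		com.join(timeout=1.0)
except KeyboardInterrupt:
	com.shutdown_flag.set()  # ask the run() loop to exit cleanly
	com.join()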
Example #9

import sys

if len(sys.argv) < 2:
    usage()
    sys.exit(1)

# Defaults.
table = sys.argv[1]
ordering = "monotonic"
stack_size = 40
perc = 0.001

if len(sys.argv) >= 3:
    ordering = sys.argv[2]

if len(sys.argv) >= 4:
    stack_size = int(sys.argv[3])

if len(sys.argv) >= 5:
    perc = float(sys.argv[4])

s = "das ist ein kleines haus"
d = Decoder(s, table, ordering, stack_size, perc)
d.decode()

for i, s in enumerate(d.stacks):
    print("Stack_%d = %d | " % (i, len(s)), end="")
print()

for pos, hypo in enumerate(d.stacks[-1].nlargest()):
    print("%d\t%s\t\t%f" % (pos, hypo.output, hypo.score))
Example #10
import os

import tensorflow as tf


def main(_):

	"""
	Run the main function. Helper objects (FLAGS, Encoder, Decoder,
	Image_Loader, norm_img, denorm_img, generate_z, get_loss,
	save_image) are defined elsewhere in this project.
	"""

	#___________________________________________Layer info_____________________________________________________
	n = FLAGS.hidden_n

	Encoder_infos = {
		"outdim": [n, n, 2*n, 2*n, 2*n, 3*n, 3*n, 3*n, 3*n],
		"kernel": [[3, 3]] * 9,  # 3x3 convolutions throughout
		"stride": [[1, 1], [1, 1], [1, 1], [2, 2], [1, 1],
				   [1, 1], [2, 2], [1, 1], [1, 1]],
	}

	Decoder_infos = {
		"outdim": [n, n, n, n, n, n, 3],
		"kernel": [[3, 3]] * 7,
		"stride": [[1, 1]] * 7,
	}

	Generator_infos = {
		"outdim": [n, n, n, n, n, n, 3],
		"kernel": [[3, 3]] * 7,
		"stride": [[1, 1]] * 7,
	}


	"""
	Prepare Image Loader
	"""
	root = "./CelebA/images"
	batch_size = FLAGS.bn
	scale_size = [FLAGS.scale_h,FLAGS.scale_w]
	data_format = "NHWC"
	loader = Image_Loader(root, batch_size, scale_size, data_format, file_type="jpg")



	"""
	Make Saving Directories
	"""
	os.makedirs("./Check_Point", exist_ok=True)
	os.makedirs("./logs", exist_ok=True) # make logs directories to save summaries
	os.makedirs("./Real_Images", exist_ok=True)
	os.makedirs("./Generated_Images", exist_ok=True)
	os.makedirs("./Decoded_Generated_Images", exist_ok=True)




	#----------------------------------------------------------------------------------------------------



	#____________________________________Model composition________________________________________

	k = tf.Variable(0.0, name = "k_t", trainable = False, dtype = tf.float32) #init value of k_t = 0
	
	
	batch = loader.queue # Get image batch tensor
	image = norm_img(batch) # Normalize image
	z_G = generate_z() # Sample embedding vector batch from uniform distribution
	z_D = generate_z() # Sample embedding vector batch from uniform distribution


	E = Encoder("Encoder", Encoder_infos)
	D = Decoder("Decoder", Decoder_infos)
	G = Decoder("Generator", Generator_infos)

	#Generator
	generated_image = G.decode(z_G)
	generated_image_for_disc = G.decode(z_D, reuse = True)


	#Discriminator (Auto-Encoder)	

	#image <--AutoEncoder--> reconstructed_image_real
	embedding_vector_real = E.encode(image)
	reconstructed_image_real = D.decode(embedding_vector_real)

	#generated_image_for_disc <--AutoEncoder--> reconstructed_image_fake
	embedding_vector_fake_for_disc = E.encode(generated_image_for_disc, reuse=True)
	reconstructed_image_fake_for_disc = D.decode(embedding_vector_fake_for_disc, reuse=True)

	#generated_image <--AutoEncoder--> reconstructed_image_fake
	embedding_vector_fake = E.encode(generated_image, reuse=True)
	reconstructed_image_fake = D.decode(embedding_vector_fake, reuse=True)


	#-----------------------------------------------------------------------------------------------



	#_________________________________Loss & Summary_______________________________________________


	"""
	Define Loss
	"""
	real_image_loss = get_loss(image, reconstructed_image_real)
	generator_loss_for_disc = get_loss(generated_image_for_disc, reconstructed_image_fake_for_disc)
	discriminator_loss = real_image_loss - tf.multiply(k, generator_loss_for_disc)

	generator_loss = get_loss(generated_image, reconstructed_image_fake)
	global_measure = real_image_loss + tf.abs(tf.multiply(FLAGS.gamma,real_image_loss) - generator_loss)


	"""
	Summaries
	"""
	tf.summary.scalar('Real image loss', real_image_loss)
	tf.summary.scalar('Generator loss for discriminator', generator_loss_for_disc)
	tf.summary.scalar('Discriminator loss', discriminator_loss)
	tf.summary.scalar('Generator loss', generator_loss)
	tf.summary.scalar('Global_Measure', global_measure)
	tf.summary.scalar('k_t', k)
	

	merged_summary = tf.summary.merge_all() # merge summaries; no more summaries below this line

	#-----------------------------------------------------------------------------------------------







	#_____________________________________________Train_______________________________________________

	discriminator_parameters = []
	generator_parameters = []

	for v in tf.trainable_variables():
		if 'Encoder' in v.name:
			discriminator_parameters.append(v)
			print("Discriminator parameter : ", v.name)
		elif 'Decoder' in v.name:
			discriminator_parameters.append(v)
			print("Discriminator parameter : ", v.name)			
		elif 'Generator' in v.name:
			generator_parameters.append(v)
			print("Generator parameter : ", v.name)
		else:
			print("None of Generator and Discriminator parameter : ", v.name)

	optimizer_D = tf.train.AdamOptimizer(FLAGS.lr,beta1=FLAGS.B1,beta2=FLAGS.B2).minimize(discriminator_loss,var_list=discriminator_parameters)
	optimizer_G = tf.train.AdamOptimizer(FLAGS.lr,beta1=FLAGS.B1,beta2=FLAGS.B2).minimize(generator_loss,var_list=generator_parameters)

	with tf.control_dependencies([optimizer_D, optimizer_G]):
		k_update = tf.assign(k, tf.clip_by_value(k + FLAGS.lamb * (FLAGS.gamma*real_image_loss - generator_loss), 0, 1)) #update k_t

	init = tf.global_variables_initializer()	


	NUM_THREADS=2
	config=tf.ConfigProto(inter_op_parallelism_threads=NUM_THREADS,\
						intra_op_parallelism_threads=NUM_THREADS,\
						allow_soft_placement=True,\
						device_count = {'CPU': 1},\
						)

	# config.gpu_options.per_process_gpu_memory_fraction = FLAGS.gpu_portion



	with tf.Session(config=config) as sess:

		sess.run(init) # Initialize Variables

		coord = tf.train.Coordinator() # Set Coordinator to Manage Queue Runners
		threads = tf.train.start_queue_runners(sess, coord=coord) # Set Threads
		writer = tf.summary.FileWriter('./logs', sess.graph) # add the graph to the file './logs'		

#_______________________________Restore____________________________________

		saver = tf.train.Saver(max_to_keep=1000)
		ckpt = tf.train.get_checkpoint_state(checkpoint_dir="./Check_Point")

		
		# try :	
		# 	if ckpt and ckpt.model_checkpoint_path:
		# 		print("check point path : ", ckpt.model_checkpoint_path)
		# 		saver.restore(sess, ckpt.model_checkpoint_path)	
		# 		print('Restored!')
		# except AttributeError:
		# 		print("No checkpoint")	

		Real_Images = sess.run(denorm_img(image))
		save_image(Real_Images, '{}.png'.format("./Real_Images/Real_Image"))

#---------------------------------------------------------------------------
		for t in range(FLAGS.iteration): # Mini-Batch Iteration Loop

			if coord.should_stop():
				break
			
			_, _, l_D, l_G, l_Global, k_t = sess.run([\
													optimizer_D,\
													optimizer_G,\
													discriminator_loss,\
													generator_loss,\
													global_measure,\
													k_update,\
											   		])

			print(
				 " Step : {}".format(t),
				 " Global measure of convergence : {}".format(l_Global),
				 " Generator Loss : {}".format(l_G),
				 " Discriminator Loss : {}".format(l_D),
				 " k_{} : {}".format(t,k_t) 
				 )


			

			
	       #________________________________Save____________________________________


			if t % 200 == 0:

				summary = sess.run(merged_summary)
				writer.add_summary(summary, t)


				Generated_Images, Decoded_Generated_Images = sess.run([denorm_img(generated_image), denorm_img(reconstructed_image_fake)])
				save_image(Generated_Images, '{}/{}{}.png'.format("./Generated_Images", "Generated", t))
				save_image(Decoded_Generated_Images, '{}/{}{}.png'.format("./Decoded_Generated_Images", "AutoEncoded", t))
				print("-------------------Image saved-------------------")


			if t % 500 == 0:
				print("Save model {}th".format(t))
				saver.save(sess, "./Check_Point/model.ckpt", global_step = t)


	       #--------------------------------------------------------------------
		
		writer.close()
		coord.request_stop()
		coord.join(threads)
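
The k_update op defined above implements BEGAN's closed-loop balancing term; as a plain scalar recurrence (a sketch of the arithmetic only, independent of TensorFlow) it reads:

# Scalar form of the k_t update used above:
# k_{t+1} = clip(k_t + lamb * (gamma * L_real - L_gen), 0, 1)
def update_k(k, real_loss, gen_loss, lamb, gamma):
	return min(max(k + lamb * (gamma * real_loss - gen_loss), 0.0), 1.0)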
Example #11

socket = ServerUDP.initServerSocket()
print("Running and waiting...")

while True:

    try:

        encoded_text = ServerUDP.readSocket(socket)

        print("Data received!")

        with open(Util.ENCODED_FILE, "w") as f:
            f.write(encoded_text)

        print("Decoding data...")

        Decoder.decode()

        print("File successfully decoded!")
        print("\nRunning and waiting...")

    except Exception as e:
        print(e)
        print("Error!")

#   finally:
#       socket.close()
#       os._exit(0)
Example #12
    # (Excerpt begins inside a conditional that selects the coding scheme;
    # the matching branch above presumably builds the Encoder for P.)
        decoder = Decoder(P, 3)
    else:
        encoder = EncoderHamming()
        decoder = DecoderHamming()
    channel = Channel(0.3)

    # Receiving code
    code = np.matrix([[int(x) for x in input()]])

    # Printing code
    print("Code received: ")
    print(code)
    input()

    # Encoding channel
    print("Encoding: ")
    encoded = encoder.encode(code)
    print(encoded)
    input()

    # Passing through channel
    print("Passing through channel: ")
    through_channel = np.array(channel.add_noise(np.array(encoded)[0]))
    print(through_channel)
    input()

    # Decoding code
    print("Decoding: ")
    decoded = decoder.decode(through_channel)
    print(decoded)