def map_function(self, job_name, chunk_size, input_file, no_of_reducers, start_index):
    """Read one chunk of *input_file* and run the map phase for *job_name*.

    Side effect: the constructed engine is stored on ``Worker.engine`` so
    that the subsequent reduce phase can reuse it.

    :param job_name:       one of the ``Jobs`` constants selecting the job type
    :param chunk_size:     number of units (words or bytes) to read from the file
    :param input_file:     path of the file to read the chunk from
    :param no_of_reducers: partition count handed to the engine
    :param start_index:    offset at which this worker's chunk begins
    :return: the map-phase output (partitioned intermediate pairs), or ``""``
             when the job name is not recognized.
    """
    map_output = ""

    # Word-oriented jobs share the same reader, engine type and phase method;
    # they differ only in the map/reduce classes.
    word_jobs = {
        Jobs.WORD_COUNT_JOB: (WordCountMap, WordCountReduce),
        Jobs.SORTING_JOB: (SortingMap, SortingReduce),
    }
    # Hamming jobs: (chunk reader, map class, reduce class, phase method name).
    # Only the encode job reads raw text; the rest operate on binary chunks.
    hamming_jobs = {
        Jobs.HAMMING_ENCODE_JOB: (splitter.read_chunk, HammingEncodingMap,
                                  HammingEncodingReduce, "hamming_encode_map_phase"),
        Jobs.HAMMING_DECODE_JOB: (splitter.read_binary_chunk, HammingDecodingMap,
                                  HammingDecodingReduce, "hamming_decode_map_phase"),
        Jobs.HAMMING_ERROR_JOB: (splitter.read_binary_chunk, HammingErrorMap,
                                 HammingErrorReduce, "hamming_error_map_phase"),
        Jobs.HAMMING_CHECK_JOB: (splitter.read_binary_chunk, HammingCheckMap,
                                 HammingCheckReduce, "hamming_check_map_phase"),
        Jobs.HAMMING_FIX_JOB: (splitter.read_binary_chunk, HammingFixMap,
                               HammingFixReduce, "hamming_fix_map_phase"),
    }

    if job_name in word_jobs:
        mapper, reducer = word_jobs[job_name]
        values = splitter.read_chunk_by_word(input_file, start_index, chunk_size)
        engine = mapreduce.Engine(values, mapper, reducer,
                                  mapreduce.Partition, no_of_reducers)
        Worker.engine = engine
        map_output = engine.map_phase()
    elif job_name in hamming_jobs:
        reader, mapper, reducer, phase_name = hamming_jobs[job_name]
        values = reader(input_file, start_index, chunk_size)
        # Hamming engines additionally need the chunk's start index.
        engine = hamming_mapreduce.Engine(values, mapper, reducer,
                                          mapreduce.Partition, no_of_reducers,
                                          start_index)
        Worker.engine = engine
        map_output = getattr(engine, phase_name)()
    else:
        # BUG FIX: the original also executed `Worker.engine = engine` on this
        # path, where `engine` is unbound -> NameError for any unknown job.
        # Leave Worker.engine untouched and just report the bad job name.
        print("Invalid Job Name .............")
    return map_output
# NOTE(review): orphaned main-script fragment. The opening of the
# hamming_mapreduce.Engine( call that these trailing arguments belong to is
# missing from this chunk, so `engine` below is otherwise unresolved here —
# confirm against the full file before touching this section.
hamming_mapreduce.Partition, 1, 0)  ## engine need index
# Encode: run the map phase, reduce each partition, and concatenate the
# reducer outputs into one encoded string.
map_output = engine.hamming_encode_map_phase()
encode_string = ''
for partition in map_output:
    result_list = engine.reduce_phase(partition)
    for r in result_list:
        encode_string += r
        # print "------------------------------------------------------------------------------------------"
# print encode_string
# Persist the encoded string so it can be re-read for the decode round-trip.
outfile = open("encode_binary", 'w')
outfile.write(encode_string)
outfile.close()
print "---------------------------------------------------"
# Round-trip check: read the encoded file back and decode it with a second
# engine (single reducer, start index 0).
test_text = splitter.read_binary_chunk('encode_binary', 1, 3)
print test_text
engine1 = hamming_mapreduce.Engine(test_text, HammingDecodingMap, HammingDecodingReduce, hamming_mapreduce.Partition, 1, 0)
map_output = engine1.hamming_decode_map_phase()
encode_string = ''
for partition in map_output:
    result_list = engine1.reduce_phase(partition)
    for r in result_list:
        print r
        encode_string += r
        # print r
# print "------------------------------------------------------------------------------------------"
print "---------------------------------------------------"
# NOTE(review): the block below duplicates the encode/decode round-trip
# above almost verbatim (semicolon style aside) — likely a stale copy/paste;
# candidate for deletion once confirmed unused.
## engine need index
map_output = engine.hamming_encode_map_phase()
encode_string = ''
for partition in map_output:
    result_list = engine.reduce_phase(partition);
    for r in result_list:
        encode_string+=r
        # print "------------------------------------------------------------------------------------------"
# print encode_string
outfile = open("encode_binary", 'w')
outfile.write(encode_string)
outfile.close()
print "---------------------------------------------------"
test_text = splitter.read_binary_chunk('encode_binary', 1, 3)
print test_text
engine1 = hamming_mapreduce.Engine(test_text, HammingDecodingMap,HammingDecodingReduce, hamming_mapreduce.Partition,1, 0)
map_output = engine1.hamming_decode_map_phase()
encode_string = ''
for partition in map_output:
    result_list = engine1.reduce_phase(partition);
    for r in result_list:
        print r
        encode_string+=r
        # print r
# print "------------------------------------------------------------------------------------------"
print "---------------------------------------------------"