Example #1
0
 def setUp(self):
     """Create the fixture command-stream files and open them for reading.

     Writes two text streams and one binary stream into temp files
     (via the base class's ``_mkfn`` helper) and keeps the open file
     objects on ``self`` for the tests to use.
     """
     super(TestFramework, self).setUp()
     fname = self._mkfn('foo.txt')
     stream_writer(fname, STREAM_1)
     self.stream1 = open(fname, 'r')
     fname = self._mkfn('foo2.txt')
     stream_writer(fname, STREAM_2)
     self.stream2 = open(fname, 'r')
     fname = self._mkfn('foo3.bin')
     binary_stream_writer(fname, STREAM_3)
     # foo3.bin holds a binary-encoded stream: open it in binary mode so
     # newline translation / text decoding cannot corrupt later reads.
     # (Visible consumers only use .name and .close(), so 'rb' is safe.)
     self.stream3 = open(fname, 'rb')
Example #2
0
 def setUp(self):
     """Write the test fixture streams to temp files and open them.

     Two text-encoded streams and one binary-encoded stream are created;
     the open file objects are stored on ``self`` for use by the tests.
     """
     super(TestFramework, self).setUp()
     fname = self._mkfn('foo.txt')
     stream_writer(fname, STREAM_1)
     self.stream1 = open(fname, 'r')
     fname = self._mkfn('foo2.txt')
     stream_writer(fname, STREAM_2)
     self.stream2 = open(fname, 'r')
     fname = self._mkfn('foo3.bin')
     binary_stream_writer(fname, STREAM_3)
     # This file was written by binary_stream_writer: open in binary mode
     # to prevent newline translation or text decoding of binary data.
     self.stream3 = open(fname, 'rb')
Example #3
0
 def _test_map_reduce_with_private_encoding_helper(self,
                                                   factory,
                                                   fast_combiner=False):
     """Run a map task with private encoding, feed its grouped output
     through a reduce task, and verify the decoded result against
     STREAM_3.
     """
     self.stream3.close()
     cmd_file = self.stream3.name
     out_file = cmd_file + '.out'
     reduce_infile = cmd_file + '.reduce'
     reduce_outfile = reduce_infile + '.out'
     # Map phase: run the command stream with private encoding enabled.
     run_task(factory,
              cmd_file=cmd_file,
              private_encoding=True,
              fast_combiner=fast_combiner)
     # Group the map output values under their keys.
     grouped = {}
     with open(out_file) as map_out:
         for cmd, args in BinaryDownStreamFilter(map_out):
             if cmd != 'output':
                 continue
             grouped.setdefault(args[0], []).append(args[1])
     # Assemble the reduce-side command stream from the grouped data.
     commands = [
         ('start', 0),
         ('setJobConf', ('key1', 'value1', 'key2', 'value2')),
         ('runReduce', 0, False),
     ]
     for key, values in grouped.items():
         commands.append(('reduceKey', key))
         commands.extend(('reduceValue', value) for value in values)
     commands.append(('close', ))
     binary_stream_writer(reduce_infile, commands)
     # Reduce phase.
     run_task(factory, cmd_file=reduce_infile, private_encoding=True)
     # Decode the reduce output into a plain-text summary, then compare.
     with open(reduce_outfile) as f, self._mkf('foo.out', mode='w') as o:
         for cmd, args in BinaryUpStreamDecoder(f):
             if cmd == 'progress':
                 o.write('progress\t%s\n' % args[0])
             elif cmd == 'output':
                 o.write('output\t%s\n' % '\t'.join(args))
             elif cmd == 'done':
                 o.write('done\n')
     self.check_result('foo.out', STREAM_3)
Example #4
0
 def _test_map_reduce_with_private_encoding_helper(self, factory,
                                                   fast_combiner=False):
     """Drive a full map/reduce round trip with private encoding on,
     then check the decoded reduce output against STREAM_3.
     """
     self.stream3.close()
     cmd_file = self.stream3.name
     out_file = cmd_file + '.out'
     reduce_infile = cmd_file + '.reduce'
     reduce_outfile = reduce_infile + '.out'
     run_task(factory, cmd_file=cmd_file, private_encoding=True,
              fast_combiner=fast_combiner)
     # Collect map output, grouping values by key.
     by_key = {}
     with open(out_file) as map_stream:
         for cmd, args in BinaryDownStreamFilter(map_stream):
             if cmd == 'output':
                 by_key.setdefault(args[0], []).append(args[1])
     # Build the command stream for the reduce task.
     reduce_cmds = [('start', 0),
                    ('setJobConf', ('key1', 'value1', 'key2', 'value2')),
                    ('runReduce', 0, False)]
     for key in by_key:
         reduce_cmds.append(('reduceKey', key))
         for value in by_key[key]:
             reduce_cmds.append(('reduceValue', value))
     reduce_cmds.append(('close',))
     binary_stream_writer(reduce_infile, reduce_cmds)
     run_task(factory, cmd_file=reduce_infile, private_encoding=True)
     # Translate the reduce output stream into a text file for comparison.
     with open(reduce_outfile) as f, self._mkf('foo.out', mode='w') as o:
         decoder = BinaryUpStreamDecoder(f)
         for cmd, args in decoder:
             if cmd == 'progress':
                 o.write('progress\t%s\n' % args[0])
             elif cmd == 'output':
                 o.write('output\t%s\n' % '\t'.join(args))
             elif cmd == 'done':
                 o.write('done\n')
     self.check_result('foo.out', STREAM_3)