import logging
import os

# NOTE: `do_customize_server` is assumed to be imported from the project's
# server module; the exact import path is project-specific.


def test_base_mapreduce_no_parallel():
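    """Submit `count_jobs` trivial jobs and check that the summed per-response
    core counts equal the number of jobs.
    """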
    print('\nWarning: need to run at least one shard...\n')

    logging.basicConfig(level=logging.INFO, format="[%(levelname)s] %(message)s")
    logging.getLogger("").setLevel(logging.INFO)

    # Task file and count jobs
    task_file = os.path.join('tasks', 'simple_mapreduce_task.py')
    count_jobs = 4

    # Make jobs: one single-space payload per job key
    jobs = {str(i): " " for i in range(count_jobs)}

    # Run
    generator = do_customize_server(
        task_file,
        jobs,
        type='simple',
        transition_process_duration=1.0)

    # Process response: each item is (key, (mac, cores))
    result = 0
    for key, (mac, cores) in generator:
        result += cores

    assert result == count_jobs


def get_count_cores(task_file, count_core_roughly, transition_process_duration=0):
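    """Return the total number of cores reported for `count_core_roughly`
    empty jobs.
    """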
    # Make jobs: one empty payload per job key
    jobs = {str(i): "" for i in range(count_core_roughly)}

    # Run
    generator = do_customize_server(
        task_file,
        jobs,
        type='simple',
        transition_process_duration=transition_process_duration)

    # Process response: each item is (key, (mac, cores))
    sum_cores = 0
    for key, (mac, cores) in generator:
        sum_cores += cores

    return sum_cores


def run_gtests(task_file, count_cores, exe_file_name):
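    """Ship the gtest binary `exe_file_name` as the payload of `count_cores`
    jobs and return the response generator from `do_customize_server`.
    """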
    def _file_to_stream(filename):
        # Read the executable as raw bytes to ship to every shard
        with open(filename, 'rb') as f:
            return f.read()

    stream = _file_to_stream(exe_file_name)
    
    # Create jobs: each key encodes "<shard index>@<total cores>@<timeout>";
    # every job carries the same executable payload.
    timeout = 0
    jobs = {
        '{}@{}@{}'.format(i, count_cores, timeout): stream
        for i in range(count_cores)
    }
    return do_customize_server(task_file, jobs, type='simple')
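

# Usage sketch (illustrative): 'tasks/gtest_task.py' and 'bin/unit_tests' are
# hypothetical placeholders, and the gtest responses are assumed to arrive as
# (key, value) pairs like the simple task's responses above.
if __name__ == '__main__':
    cores = get_count_cores(os.path.join('tasks', 'simple_mapreduce_task.py'), 8)
    for key, value in run_gtests(os.path.join('tasks', 'gtest_task.py'), cores, 'bin/unit_tests'):
        print(key, value)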