import datetime
import time

import numpy as np
from mpi4py import MPI

# _args, _get_info, replicate_list, save_data and SUPPORT_GROMACS are
# helpers defined elsewhere in this module.
def main_local():
    args = _args()
    time0 = time.time()

    box, numframes, cmplx = _get_info(args)
    if args.end == -1:
        args.end = numframes

    print('Box: {}'.format(box))
    print('Frames: {}'.format(numframes))

    if args.box_left and args.box_right:
        x0, y0, z0 = map(float, args.box_left.split(','))
        x1, y1, z1 = map(float, args.box_right.split(','))
        pos_cons = ((x0, y0, z0), (x1, y1, z1))
    else:
        pos_cons = ((0.0, 0.0, 0.0), tuple(box))

    # Prepare for MPI run
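    # Split the frame interval [begin, end) into one contiguous chunk per
    # MPI task, so each worker processes its own slice of the trajectory.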
    nt = MPI.COMM_WORLD.size
    if nt > 1:
        num_frames = args.end - args.begin
        per_process = int(round(num_frames / float(nt)))
        print('per_process: {}'.format(per_process))
        frame_ranges = range(args.begin, args.end, per_process)
        frame_range_list = [
            (frame_ranges[i], frame_ranges[i+1]) for i in range(len(frame_ranges)-1)
        ]
        frame_range_list.append((frame_ranges[-1], args.end))  # last chunk runs to args.end, matching the serial branch (ranges are right-open)
    else:
        frame_range_list = [(args.begin, args.end)]

    print('Frame range: {}'.format(frame_range_list))
    # Build the q-vector, replicated across all molecules
    q_vector = np.array(replicate_list(args.vector, args.molecules, args.N, cmplx=cmplx))

    # PMI run
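    # pmi.setup() starts the worker tasks and pmi.execfile_ re-executes this
    # script on every worker; that is how _gromacs_processing/_h5_processing
    # become defined there for pmi.invoke below (on the workers, __name__ is
    # set to 'pmi', the convention the hello example further down keys on).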
    import pmi
    pmi.setup()
    pmi.execfile_(__file__)
    if (args.trj.endswith('trr') or args.trj.endswith('xtc')) and SUPPORT_GROMACS:
        data = pmi.invoke(
            '_gromacs_processing',
            args, q_vector, frame_range_list, pos_cons)
    elif args.trj.endswith('h5'):
        data = pmi.invoke(
            '_h5_processing',
            args, q_vector, frame_range_list, pos_cons)
    else:
        raise RuntimeError('Unsupported trajectory file: {}'.format(args.trj))

    # Collect the data from all workers
    vectors = []
    for node_data in data:
        vectors.extend(node_data)

    filename_prefix = '' if not args.prefix else args.prefix + '_'
    file_template = '# Date: %s\n# Filename: %s\n' % (datetime.datetime.today(), args.trj)
    print('Saving data...')
    save_data('vector', vectors, args.vector, filename_prefix, file_template)
    print('Processing time: {}s with {} CPUs'.format(time.time() - time0, nt))
Example #2
if __name__ != 'pmi':
    ##################################################
    ## Serial code
    ##################################################
    import pmi

    pmi.setup()
    pmi.execfile_(__file__)

    # invoke the method on all workers and get the results
    res = pmi.invoke('hello_parallel', 'Olaf')

    print('\n'.join(res))

else:
    ##################################################
    ## Parallel code
    ##################################################
    from mpi4py import MPI

    def hello_parallel(name):
        return 'Hello %s, this is MPI task %d!' % (name, MPI.COMM_WORLD.rank)
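# When pmi.execfile_ replays this file on a worker, __name__ == 'pmi', so a
# worker only defines hello_parallel; the controller run (as __main__, e.g.
# launched via mpiexec) takes the first branch and drives the invocation.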


Example #3

import datetime
import time

import numpy as np
from mpi4py import MPI

def main_local():
    args = _args()
    time0 = time.time()

    box, numframes, cmplx, is_gromacs = _get_info(args)
    if args.end == -1:
        args.end = numframes

    print('Box: {}'.format(box))
    print('Frames: {}'.format(numframes))
    print('Scaling factor: {}'.format(args.scalling))

    if args.box_left and args.box_right:
        x0, y0, z0 = map(lambda x: float(x)*args.scalling, args.box_left.split(','))
        x1, y1, z1 = map(lambda x: float(x)*args.scalling, args.box_right.split(','))
        pos_cons = ((x0, y0, z0), (x1, y1, z1))
    else:
        pos_cons = ((0.0, 0.0, 0.0), tuple(box))
    print('Position constraints: {}'.format(pos_cons))

    # Prepare for MPI run
    nt = MPI.COMM_WORLD.size
    if nt > 1:
        num_frames = args.end - args.begin
        per_process = int(round(num_frames / float(nt)))
        print('per_process: {}'.format(per_process))
        frame_ranges = range(args.begin, args.end, per_process)
        frame_range_list = [
            (frame_ranges[i], frame_ranges[i+1]) for i in range(len(frame_ranges)-1)
        ]
        frame_range_list.append((frame_ranges[-1], args.end))  # last chunk runs to args.end, matching the serial branch (ranges are right-open)
    else:
        frame_range_list = [(args.begin, args.end)]

    print('Frame range: {}'.format(frame_range_list))
    # Replicate the per-molecule bond, angle and torsion definitions across all molecules
    q_bonds = np.array(replicate_list(args.bonds, args.molecules, args.N, cmplx=cmplx))
    q_angles = np.array(replicate_list(args.angles, args.molecules, args.N, cmplx=cmplx))
    q_torsions = np.array(replicate_list(args.torsions, args.molecules, args.N, cmplx=cmplx))

    # PMI run
    import pmi
    pmi.setup()
    pmi.execfile_(__file__)
    if is_gromacs:
        data = pmi.invoke(
            '_gromacs_processing',
            args, q_bonds, q_angles, q_torsions, frame_range_list, pos_cons, box)
    else:
        data = pmi.invoke(
            '_h5_processing',
            args, q_bonds, q_angles, q_torsions, frame_range_list, pos_cons, box)

    # Collect the data from all workers
    bonds = []
    angles = []
    torsions = []
    for node_data in data:
        bonds.extend(node_data[0])
        angles.extend(node_data[1])
        torsions.extend(node_data[2])

    filename_prefix = '' if not args.prefix else args.prefix + '_'
    file_template = '# Date: %s\n# Filename: %s\n' % (datetime.datetime.today(), args.trj)
    print('Saving data...')
    save_data('bond', bonds, args.bonds, filename_prefix, file_template, args.timeseries)
    save_data('angle', angles, args.angles, filename_prefix, file_template, args.timeseries)
    save_data('torsion', torsions, args.torsions, filename_prefix, file_template, args.timeseries)
    print('Processing time: {}s with {} CPUs'.format(time.time() - time0, nt))
Example #4
import pmi

pmi.setup(2)
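# Presumably the argument to setup() requests two worker tasks; the other
# examples call pmi.setup() with no argument.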

# create the parallel function
pmi.exec_(
    """
from mpi4py import MPI

def hello_parallel(name):
    return 'Hello %s, this is MPI task %d!' % (name, MPI.COMM_WORLD.rank)
"""
)

# invoke the function on all workers and get the results as a list
res = pmi.invoke("hello_parallel", "Olaf")

print("\n".join(res))