Example #1
def main(**args):

    spec = args['specification']

    objects = MultiArrayTransmitReceiveBeamplot.get_objects_from_spec(*spec)
    simulation = objects[0]
    arrays = objects[1:]

    for k, v in simulation.items():
        args.setdefault(k, v)
    for k, v in defaults.items():
        args.setdefault(k, v)

    threads = args['threads']
    file = args['file']
    rotations = args['rotation']

    mode = simulation['mesh_mode']
    v1 = np.linspace(*simulation['mesh_vector1'])
    v2 = np.linspace(*simulation['mesh_vector2'])
    v3 = np.linspace(*simulation['mesh_vector3'])

    angles = list()
    ids = list()
    dirs = list()

    for array_id, direction, a_start, a_stop, a_step in rotations:
        ids.append(array_id)
        dirs.append(direction)
        angles.append(np.arange(a_start, a_stop + a_step, a_step))

    # build rotation rules: at each angle step, one (array_id, direction, angle) tuple per array
    zip_args = list()
    for array_id, direction, ang in zip(ids, dirs, angles):
        zip_args.append(zip(repeat(array_id), repeat(direction), ang))
    rotation_rules = list(zip(*zip_args))

    # check for existing file
    if os.path.isfile(file):

        response = input(
            'File ' + str(file) + ' already exists.\n' +
            'Continue (c), overwrite (o), or do nothing (any other key)?')

        if response.lower() in ['o', 'overwrite']:

            os.remove(file)

            # create field positions
            field_pos = sim.meshview(v1, v2, v3, mode=mode)

            # create database
            with closing(sql.connect(file)) as con:

                # create database tables
                # args already includes the simulation parameters merged above
                sim.create_metadata_table(con, **args)
                create_field_positions_table(con, field_pos)
                create_pressures_table(con)

        elif response.lower() in ['c', 'continue']:

            with closing(sql.connect(file)) as con:
                table = pd.read_sql(
                    'SELECT x, y, z FROM field_position WHERE is_complete=0',
                    con)
            field_pos = np.atleast_2d(np.array(table))

        else:
            raise Exception('Database already exists')

    else:

        # Make directories if they do not exist
        file_dir = os.path.dirname(os.path.abspath(file))
        if not os.path.exists(file_dir):
            os.makedirs(file_dir)

        # create field positions
        field_pos = sim.meshview(v1, v2, v3, mode=mode)

        # create database
        with closing(sql.connect(file)) as con:

            # create database tables
            # args already includes the simulation parameters merged above
            sim.create_metadata_table(con, **args)
            create_field_positions_table(con, field_pos)
            create_pressures_table(con)

    try:

        # start multiprocessing pool and run process
        write_lock = multiprocessing.Lock()
        pool = multiprocessing.Pool(threads,
                                    initializer=init_process,
                                    initargs=(write_lock, ))

        simulation = abstract.dumps(simulation)
        arrays = abstract.dumps(arrays)
        proc_args = [(file, fp, rule, simulation, arrays)
                     for fp in sim.chunks(field_pos, 100)
                     for rule in rotation_rules]
        result = pool.imap_unordered(process, proc_args)

        for r in tqdm(result, desc='Simulating', total=len(proc_args)):
            pass

        pool.close()

    except Exception as e:

        print(e)
        pool.terminate()
        pool.close()
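
The helpers init_process and process referenced above are not part of this listing. As a minimal sketch, assuming the initializer simply stashes the shared lock in a module-level global so each worker can serialize its SQLite writes (the global name and the placeholder write below are hypothetical):

import sqlite3 as sql
from contextlib import closing

_write_lock = None  # set once per worker by the pool initializer


def init_process(lock):
    # store the shared lock where the worker function can reach it
    global _write_lock
    _write_lock = lock


def process(proc_args):
    file, field_pos, rule, simulation, arrays = proc_args
    # ... run the solver for this chunk of field positions and rotation rule ...
    with _write_lock:
        with closing(sql.connect(file)) as con:
            con.execute('UPDATE field_position SET is_complete=1')  # placeholder write
            con.commit()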
Example #2
def main(**args):

    # get abstract objects from specification
    spec = args['spec']

    simulation, array = ReceiveCrosstalk.get_objects_from_spec(*spec)

    # set defaults with the following priority: command line arguments >> simulation object >> script defaults
    for k, v in simulation.items():
        args.setdefault(k, v)
        if args[k] is None:
            args[k] = v

    for k, v in defaults.items():
        args.setdefault(k, v)
        if args[k] is None:
            args[k] = v

    print('simulation parameters as key --> value:')
    for k, v in args.items():
        print(k, '-->', v)

    # get args needed in main
    c = args['sound_speed']
    file = args['file']
    threads = args['threads']
    f_start, f_stop, f_step = args['freqs']

    # create frequencies/wavenumbers
    fs = np.arange(f_start, f_stop + f_step, f_step)
    ks = 2 * np.pi * fs / c
    njobs = len(fs)
    is_complete = None
    ijob = 0

    # check for existing file
    if os.path.isfile(file):

        response = input(
            'File ' + str(file) + ' already exists.\n' +
            'Continue (c), overwrite (o), or do nothing (any other key)?')

        if response.lower() in ['o', 'overwrite']:

            os.remove(file)

            # create database
            with closing(sql.connect(file)) as con:

                # create database tables
                sim.create_metadata_table(con, **args)
                create_frequencies_table(con, fs, ks)
                sim.create_progress_table(con, njobs)

        elif response.lower() in ['c', 'continue']:
            is_complete, ijob = sim.get_progress(file)

        else:
            raise Exception('Database already exists')

    else:

        # Make directories if they do not exist
        file_dir = os.path.dirname(os.path.abspath(file))
        if not os.path.exists(file_dir):
            os.makedirs(file_dir)

        # create database
        with closing(sql.connect(file)) as con:

            # create database tables
            sim.create_metadata_table(con, **args)
            create_frequencies_table(con, fs, ks)
            sim.create_progress_table(con, njobs)

    try:

        # start multiprocessing pool and run process
        write_lock = multiprocessing.Lock()
        pool = multiprocessing.Pool(threads,
                                    initializer=init_process,
                                    initargs=(write_lock, ))

        simulation = abstract.dumps(simulation)
        array = abstract.dumps(array)
        jobs = sim.create_jobs(file, (fs, 1), (ks, 1),
                               simulation,
                               array,
                               mode='zip',
                               is_complete=is_complete)
        result = pool.imap_unordered(process, jobs)

        for r in tqdm(result, desc='Simulating', total=njobs, initial=ijob):
            pass

    except Exception as e:
        print(e)

    finally:

        pool.terminate()
        pool.close()
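
The two setdefault loops above implement the stated priority (command line arguments >> simulation object >> script defaults) by filling only keys that are missing or None. The same merge, pulled out as a small helper purely for illustration (merge_defaults is not part of the original script):

def merge_defaults(args, *sources):
    """Fill missing or None entries of args from each source, in priority order."""
    for source in sources:
        for k, v in source.items():
            args.setdefault(k, v)
            if args[k] is None:
                args[k] = v
    return args

# equivalent to the loops above:
# args = merge_defaults(args, simulation, defaults)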
Example #3
def main(args):

    # define and parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('file', nargs='?')
    parser.add_argument('-s', '--spec', nargs='+')
    parser.add_argument('-t', '--threads', type=int)
    parser.add_argument('-o', '--overwrite', action='store_true')
    args = vars(parser.parse_args(args))

    # get abstract objects from specification
    spec = args['spec']
    objects = TransmitBeamplot.get_objects_from_spec(*spec)
    simulation = objects[0]
    arrays = objects[1:]

    # set defaults with the following priority: command line arguments >> simulation object >> script defaults
    for k, v in simulation.items():
        args.setdefault(k, v)
        if args[k] is None:
            args[k] = v
    for k, v in defaults.items():
        args.setdefault(k, v)
        if args[k] is None:
            args[k] = v

    print('simulation parameters as key --> value:')
    for k, v in args.items():
        print(k, '-->', v)

    # get args needed in main
    file = args['file']
    threads = args['threads']
    overwrite = args['overwrite']
    mode = args['mesh_mode']
    mesh_vector1 = args['mesh_vector1']
    mesh_vector2 = args['mesh_vector2']
    mesh_vector3 = args['mesh_vector3']

    # create field positions
    field_pos = util.meshview(np.linspace(*mesh_vector1),
                              np.linspace(*mesh_vector2),
                              np.linspace(*mesh_vector3),
                              mode=mode)

    # calculate job-related values
    is_complete = None
    njobs = int(np.ceil(len(field_pos) / POSITIONS_PER_PROCESS))
    ijob = 0

    # check for existing file
    if os.path.isfile(file):

        if overwrite:  # overwrite requested
            os.remove(file)  # remove existing file
            create_database(file, args, njobs, field_pos)  # create database

        else:  # continue from current progress
            is_complete, ijob = util.get_progress(file)
            if np.all(is_complete):
                return

    else:
        # Make directories if they do not exist
        file_dir = os.path.dirname(os.path.abspath(file))
        if not os.path.exists(file_dir):
            os.makedirs(file_dir)

        # create database
        create_database(file, args, njobs, field_pos)

    try:
        # start multiprocessing pool and run process
        write_lock = multiprocessing.Lock()
        simulation = abstract.dumps(simulation)
        arrays = abstract.dumps(arrays)
        pool = multiprocessing.Pool(threads,
                                    initializer=init_process,
                                    initargs=(write_lock, simulation, arrays))

        jobs = util.create_jobs(file, (field_pos, POSITIONS_PER_PROCESS),
                                mode='zip',
                                is_complete=is_complete)
        result = pool.imap_unordered(run_process, jobs)

        for r in tqdm(result, desc='Simulating', total=njobs, initial=ijob):
            pass

    except Exception as e:
        print(e)

    finally:

        pool.terminate()
        pool.close()
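
util.create_jobs is not shown here, but the njobs calculation above implies the field positions are handed out in chunks of POSITIONS_PER_PROCESS rows. A rough sketch of that chunking under this assumption (chunk_positions and the chunk size are made up for illustration):

import numpy as np

POSITIONS_PER_PROCESS = 1000  # assumed value, for illustration only


def chunk_positions(field_pos, chunk_size=POSITIONS_PER_PROCESS):
    """Yield successive row chunks of the (N, 3) field position array."""
    for start in range(0, len(field_pos), chunk_size):
        yield field_pos[start:start + chunk_size]


field_pos = np.zeros((2500, 3))
njobs = int(np.ceil(len(field_pos) / POSITIONS_PER_PROCESS))      # 3, as in the script
assert njobs == len(list(chunk_positions(field_pos)))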
Example #4
def main(**args):

    # get abstract objects from specification
    spec = args['spec']

    objects = Solver.get_objects_from_spec(*spec)
    simulation = objects[0]
    arrays = objects[1:]

    # set defaults with the following priority: command line arguments >> simulation object >> script defaults
    for k, v in simulation.items():
        args.setdefault(k, v)
        if args[k] is None:
            args[k] = v
    for k, v in defaults.items():
        args.setdefault(k, v)
        if args[k] is None:
            args[k] = v

    print('simulation parameters as key --> value:')
    for k, v in args.items():
        print(k, '-->', v)

    # get args needed in main
    file = args['file']
    threads = args['threads']
    mode = args['mesh_mode']
    mesh_vector1 = args['mesh_vector1']
    mesh_vector2 = args['mesh_vector2']
    mesh_vector3 = args['mesh_vector3']
    rotations = args['rotations']
    a_start, a_stop, a_step = args['angles']

    # create field positions
    field_pos = util.meshview(np.linspace(*mesh_vector1),
                              np.linspace(*mesh_vector2),
                              np.linspace(*mesh_vector3),
                              mode=mode)

    # create angles
    angles = np.arange(a_start, a_stop + a_step, a_step)

    # create rotation rules which will be distributed by the pool
    array_ids = [array_id for array_id, _ in rotations]
    dirs = [direction for _, direction in rotations]
    zip_args = []
    for array_id, direction in zip(array_ids, dirs):
        zip_args.append(zip(repeat(array_id), repeat(direction), angles))
    rotation_rules = list(zip(*zip_args))

    # calculate job-related values
    is_complete = None
    njobs = int(np.ceil(len(field_pos) / POSITIONS_PER_PROCESS) * len(rotation_rules))
    ijob = 0

    # check for existing file
    if os.path.isfile(file):

        response = input('File ' + str(file) + ' already exists.\n' +
                         'Continue (c), overwrite (o), or do nothing (any other key)?')

        if response.lower() in ['o', 'overwrite']:

            os.remove(file)  # remove existing file
            create_database(file, args, njobs, field_pos)  # create database

        elif response.lower() in ['c', 'continue']:  # continue from current progress
            is_complete, ijob = util.get_progress(file)

        else:
            raise Exception('Database already exists')

    else:

        # Make directories if they do not exist
        file_dir = os.path.dirname(os.path.abspath(file))
        if not os.path.exists(file_dir):
            os.makedirs(file_dir)

        # create database
        create_database(file, args, njobs, field_pos)

    try:

        # start multiprocessing pool and run process
        write_lock = multiprocessing.Lock()
        simulation = abstract.dumps(simulation)
        arrays = abstract.dumps(arrays)
        pool = multiprocessing.Pool(threads, initializer=init_process, initargs=(write_lock, simulation, arrays))

        jobs = util.create_jobs(file, (field_pos, POSITIONS_PER_PROCESS), (rotation_rules, 1), mode='product',
                                is_complete=is_complete)
        result = pool.imap_unordered(run_process, jobs)

        for r in tqdm(result, desc='Simulating', total=njobs, initial=ijob):
            pass

    except Exception as e:
        print(e)

    finally:

        pool.terminate()
        pool.close()
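
The zip/repeat construction above pairs every array with the same angle at each step, so each rotation rule rotates all arrays together. A standalone example with made-up ids, directions, and angles shows the resulting structure:

from itertools import repeat

import numpy as np

rotations = [('array0', 'x'), ('array1', 'y')]  # made-up (id, dir) pairs
angles = np.arange(0, 30 + 10, 10)              # 0, 10, 20, 30

zip_args = []
for array_id, direction in rotations:
    zip_args.append(zip(repeat(array_id), repeat(direction), angles))
rotation_rules = list(zip(*zip_args))

# rotation_rules[0] -> (('array0', 'x', 0), ('array1', 'y', 0))
# rotation_rules[3] -> (('array0', 'x', 30), ('array1', 'y', 30))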
Example #5
def main(**args):

    threads = args['threads']
    file = args['file']
    spec = args['specification']

    simulation, array = TransmitBeamplot.get_objects_from_spec(*spec)

    mode = simulation['mesh_mode']
    v1 = np.linspace(*simulation['mesh_vector1'])
    v2 = np.linspace(*simulation['mesh_vector2'])
    v3 = np.linspace(*simulation['mesh_vector3'])

    # use the simulation's thread count only if none was provided
    if 'threads' in simulation:
        sim_threads = simulation.pop('threads')
        if threads is None:
            threads = sim_threads
            args['threads'] = threads

    # check for existing file
    if os.path.isfile(file):

        response = input(
            'File ' + str(file) + ' already exists.\n' +
            'Continue (c), overwrite (o), or do nothing (any other key)?')

        if response.lower() in ['o', 'overwrite']:

            os.remove(file)

            # create field positions
            field_pos = sim.meshview(v1, v2, v3, mode=mode)

            # create database
            with closing(sql.connect(file)) as con:

                # create database tables
                sim.create_metadata_table(con, **args, **simulation)
                create_field_positions_table(con, field_pos)
                create_pressures_table(con)

        elif response.lower() in ['c', 'continue']:

            with closing(sql.connect(file)) as con:

                query = 'SELECT x, y, z FROM field_position WHERE is_complete=0'
                table = pd.read_sql(query, con)

            field_pos = np.atleast_2d(np.array(table))

        else:
            raise Exception('Database already exists')

    else:

        # Make directories if they do not exist
        file_dir = os.path.dirname(os.path.abspath(file))
        if not os.path.exists(file_dir):
            os.makedirs(file_dir)

        # create field positions
        field_pos = sim.meshview(v1, v2, v3, mode=mode)

        # create database
        with closing(sql.connect(file)) as con:

            # create database tables
            sim.create_metadata_table(con, **args, **simulation)
            create_field_positions_table(con, field_pos)
            create_pressures_table(con)

    try:

        # start multiprocessing pool and run process
        write_lock = multiprocessing.Lock()
        simulation = abstract.dumps(simulation)
        array = abstract.dumps(array)

        pool = multiprocessing.Pool(threads, initializer=init_process, initargs=(write_lock, simulation, array))
        proc_args = [(file, fp) for fp in sim.chunks(field_pos, 100)]
        result = pool.imap_unordered(process, proc_args)

        for r in tqdm(result, desc='Simulating', total=len(proc_args)):
            pass

        pool.close()

    except Exception as e:

        print(e)
        pool.terminate()
        pool.close()
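
The continue branch above resumes a partial run by reloading only the unfinished positions. Isolated from the script, that step is a single pandas query against the field_position table (the helper name below is hypothetical; the table and column names come from the query in the code above):

import sqlite3 as sql
from contextlib import closing

import numpy as np
import pandas as pd


def load_incomplete_positions(file):
    """Return the (x, y, z) field positions not yet marked complete, as an (N, 3) array."""
    with closing(sql.connect(file)) as con:
        table = pd.read_sql(
            'SELECT x, y, z FROM field_position WHERE is_complete=0', con)
    return np.atleast_2d(np.array(table))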